Sep 30 19:31:09 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 30 19:31:09 crc restorecon[4713]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:31:09 crc restorecon[4713]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc 
restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:31:09 crc 
restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:09 crc restorecon[4713]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:09 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:31:10 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
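[editor note] The recurring "not reset as customized by admin" messages are restorecon reporting files whose on-disk SELinux context differs from the policy default but is treated as an admin customization, which a default (non-forced) relabel leaves alone. A minimal sketch of a forced relabel, relying only on restorecon's documented -F semantics; whether forcing is appropriate depends on whether the customizations (here, expected container_file_t labels) are actually unwanted:

```go
package main

import (
	"os"
	"os/exec"
)

// Rerun the relabel with -F, which force-resets contexts that a
// default restorecon run skips as "customized by admin". -R recurses,
// -v prints each change. Illustrative only; not part of this boot.
func main() {
	cmd := exec.Command("restorecon", "-RvF", "/var/lib/kubelet")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	if err := cmd.Run(); err != nil {
		os.Exit(1)
	}
}
```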
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:31:10 crc restorecon[4713]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
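[editor note] These deprecation warnings (and the two that follow) all point the same way: the flags should move into the file passed via the kubelet's --config flag. A minimal sketch of the equivalent settings, built with the upstream kubelet.config.k8s.io/v1beta1 Go types; the socket path, taint, and reservation values are illustrative assumptions, not values read from this node:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	kubeletconfig "k8s.io/kubelet/config/v1beta1"
	"sigs.k8s.io/yaml"
)

func main() {
	cfg := kubeletconfig.KubeletConfiguration{}
	cfg.APIVersion = "kubelet.config.k8s.io/v1beta1"
	cfg.Kind = "KubeletConfiguration"

	// --container-runtime-endpoint -> containerRuntimeEndpoint (assumed CRI-O socket)
	cfg.ContainerRuntimeEndpoint = "unix:///var/run/crio/crio.sock"
	// --volume-plugin-dir -> volumePluginDir
	cfg.VolumePluginDir = "/etc/kubernetes/kubelet-plugins/volume/exec"
	// --register-with-taints -> registerWithTaints (hypothetical taint)
	cfg.RegisterWithTaints = []corev1.Taint{
		{Key: "node.example.com/custom", Effect: corev1.TaintEffectNoSchedule},
	}
	// --system-reserved -> systemReserved (illustrative reservations)
	cfg.SystemReserved = map[string]string{"cpu": "500m", "memory": "1Gi"}
	// --minimum-container-ttl-duration is superseded by eviction settings.
	cfg.EvictionHard = map[string]string{"memory.available": "100Mi"}

	out, err := yaml.Marshal(&cfg)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // YAML suitable for the kubelet --config file
}
```

(--pod-infra-container-image has no config-file replacement; per the warning below, the sandbox image is taken from the CRI runtime's own configuration.)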
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 30 19:31:10 crc kubenswrapper[4756]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.831703 4756 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842468 4756 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842499 4756 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842509 4756 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842520 4756 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842529 4756 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842539 4756 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842547 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842557 4756 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842567 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842576 4756 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842584 4756 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842592 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842600 4756 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842608 4756 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842616 4756 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842624 4756 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842631 4756 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842639 4756 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842647 4756 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842655 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842662 4756 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842670 4756 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842678 4756 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842686 4756 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842697 4756 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842709 4756 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842719 4756 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842727 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842735 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842750 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842759 4756 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842767 4756 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842775 4756 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842783 4756 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842791 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842799 4756 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842807 4756 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842815 4756 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842824 4756 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842833 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842843 4756 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842855 4756 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842867 4756 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842877 4756 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842887 4756 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842896 4756 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842904 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842912 4756 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842920 4756 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842928 4756 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842936 4756 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842944 4756 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842952 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842960 4756 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842968 4756 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842979 4756 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842989 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.842998 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843006 4756 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843015 4756 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843025 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843034 4756 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843042 4756 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843050 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843058 4756 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843065 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843074 4756 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843081 4756 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843089 4756 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843097 4756 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.843104 4756 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843245 4756 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843261 4756 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843276 4756 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843288 4756 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843300 4756 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843309 4756 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843322 4756 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843334 4756 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843344 4756 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843354 4756 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843365 4756 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843378 4756 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843388 4756 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843422 4756 flags.go:64] FLAG: --cgroup-root=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843432 4756 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843441 4756 flags.go:64] FLAG: --client-ca-file=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843450 4756 flags.go:64] FLAG: --cloud-config=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843459 4756 flags.go:64] FLAG: --cloud-provider=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843468 4756 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843479 4756 flags.go:64] FLAG: --cluster-domain=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843490 4756 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843500 4756 flags.go:64] FLAG: --config-dir=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843509 4756 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843519 4756 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843545 4756 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843554 4756 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843563 4756 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843573 4756 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843583 4756 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843591 4756 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843600 4756 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843610 4756 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843619 4756 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843630 4756 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843639 4756 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843648 4756 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843657 4756 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843667 4756 flags.go:64] FLAG: --enable-server="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843675 4756 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843688 4756 flags.go:64] FLAG: --event-burst="100"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843698 4756 flags.go:64] FLAG: --event-qps="50"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843706 4756 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843716 4756 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843725 4756 flags.go:64] FLAG: --eviction-hard=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843744 4756 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843753 4756 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843762 4756 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843772 4756 flags.go:64] FLAG: --eviction-soft=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843782 4756 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843791 4756 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843800 4756 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843809 4756 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843819 4756 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843828 4756 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843837 4756 flags.go:64] FLAG: --feature-gates=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843848 4756 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843859 4756 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843869 4756 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843879 4756 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843889 4756 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843898 4756 flags.go:64] FLAG: --help="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843907 4756 flags.go:64] FLAG: --hostname-override=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843916 4756 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843926 4756 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843935 4756 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843944 4756 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843954 4756 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843963 4756 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843972 4756 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843981 4756 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843990 4756 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.843999 4756 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844009 4756 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844017 4756 flags.go:64] FLAG: --kube-reserved=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844027 4756 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844036 4756 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844045 4756 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844054 4756 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844062 4756 flags.go:64] FLAG: --lock-file=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844072 4756 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844081 4756 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844090 4756 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844104 4756 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844116 4756 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844127 4756 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844137 4756 flags.go:64] FLAG: --logging-format="text"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844145 4756 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844155 4756 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844164 4756 flags.go:64] FLAG: --manifest-url=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844173 4756 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844185 4756 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844194 4756 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844205 4756 flags.go:64] FLAG: --max-pods="110"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844214 4756 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844223 4756 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844233 4756 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844242 4756 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844251 4756 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844260 4756 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844269 4756 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844289 4756 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844299 4756 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844308 4756 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844318 4756 flags.go:64] FLAG: --pod-cidr=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844357 4756 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844371 4756 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844380 4756 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844390 4756 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844423 4756 flags.go:64] FLAG: --port="10250"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844433 4756 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844443 4756 flags.go:64] FLAG: --provider-id=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844452 4756 flags.go:64] FLAG: --qos-reserved=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844461 4756 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844470 4756 flags.go:64] FLAG: --register-node="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844479 4756 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844489 4756 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844512 4756 flags.go:64] FLAG: --registry-burst="10"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844521 4756 flags.go:64] FLAG: --registry-qps="5"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844530 4756 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844540 4756 flags.go:64] FLAG: --reserved-memory=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844552 4756 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844561 4756 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844571 4756 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844579 4756 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844588 4756 flags.go:64] FLAG: --runonce="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844599 4756 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844608 4756 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844618 4756 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844626 4756 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844636 4756 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844645 4756 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844655 4756 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844664 4756 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844673 4756 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844682 4756 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844691 4756 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844700 4756 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844710 4756 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844719 4756 flags.go:64] FLAG: --system-cgroups=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844728 4756 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844744 4756 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844753 4756 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844762 4756 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844773 4756 flags.go:64] FLAG: --tls-min-version=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844783 4756 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844791 4756 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844802 4756 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844812 4756 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844823 4756 flags.go:64] FLAG: --v="2"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844835 4756 flags.go:64] FLAG: --version="false"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844846 4756 flags.go:64] FLAG: --vmodule=""
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844857 4756 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.844867 4756 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845080 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845096 4756 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845109 4756 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845120 4756 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845132 4756 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845143 4756 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845158 4756 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845171 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845180 4756 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845189 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845200 4756 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845209 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845217 4756 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845227 4756 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845238 4756 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845248 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845257 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845266 4756 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845274 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845283 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845292 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845302 4756 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845311 4756 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845320 4756 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845329 4756 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845339 4756 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845348 4756 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845357 4756 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845365 4756 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845373 4756 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845382 4756 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845390 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845426 4756 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845436 4756 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845445 4756 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845455 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845464 4756 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845473 4756 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845484 4756 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845493 4756 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845502 4756 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845511 4756 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845519 4756 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845528 4756 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845536 4756 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845544 4756 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845552 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845560 4756 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845568 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845576 4756 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845584 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845593 4756 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845601 4756 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845608 4756 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845616 4756 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845624 4756 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845633 4756 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845641 4756 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845649 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845658 4756 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845666 4756 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845674 4756 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845690 4756 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845698 4756 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845705 4756 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845716 4756 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845726 4756 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845734 4756 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845742 4756 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845750 4756 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.845758 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.846928 4756 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.862700 4756 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.864184 4756 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864375 4756 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864426 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864434 4756 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864441 4756 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864447 4756 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864453 4756 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864461 4756 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864470 4756 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864476 4756 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864482 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864488 4756 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864494 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864500 4756 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864505 4756 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864509 4756 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864514 4756 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864519 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864524 4756 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864529 4756 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864534 4756 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864541 4756 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864548 4756 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864554 4756 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864560 4756 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864566 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864571 4756 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864576 4756 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864581 4756 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864587 4756 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864595 4756 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864600 4756 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864605 4756 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864611 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864619 4756 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864625 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864631 4756 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864635 4756 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864640 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864645 4756 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864650 4756 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864656 4756 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864661 4756 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864666 4756 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864672 4756 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864677 4756 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864682 4756 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864690 4756 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864697 4756 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864703 4756 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864708 4756 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864714 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864719 4756 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864724 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864729 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864735 4756 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864740 4756 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864745 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864750 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864758 4756 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864765 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864770 4756 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864775 4756 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864781 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864786 4756 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864791 4756 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864801 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864807 4756 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864812 4756 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864818 4756 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864823 4756 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.864829 4756 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.864839 4756 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865080 4756 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865092 4756 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865098 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865104 4756 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865109 4756 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865115 4756 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865120 4756 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865126 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865131 4756 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865137 4756 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865142 4756 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865148 4756 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865153 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865159 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865164 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865169 4756 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865175 4756 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865180 4756 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865185 4756 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865191 4756 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865197 4756 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865203 4756 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865208 4756 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865213 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865218 4756 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865229 4756 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865238 4756 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865244 4756 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865250 4756 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865257 4756 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865263 4756 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865268 4756 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865274 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865280 4756 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865285 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865291 4756 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865296 4756 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865301 4756 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865309 4756 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865316 4756 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865323 4756 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865328 4756 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865334 4756 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865340 4756 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865345 4756 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865351 4756 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865356 4756 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865363 4756 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865370 4756 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865377 4756 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865383 4756 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865409 4756 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865417 4756 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865423 4756 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865430 4756 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865437 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865442 4756 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865448 4756 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865455 4756 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865461 4756 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865466 4756 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865475 4756 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865480 4756 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865486 4756 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865491 4756 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865501 4756 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865506 4756 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865512 4756 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865517 4756 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865523 4756 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:31:10 crc kubenswrapper[4756]: W0930 19:31:10.865529 4756 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.865539 4756 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.865866 4756 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.874679 4756 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.874813 4756 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.876558 4756 server.go:997] "Starting client certificate rotation"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.876595 4756 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.878203 4756 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-16 18:04:38.321493475 +0000 UTC
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.878356 4756 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2590h33m27.443143446s for next certificate rotation
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.902279 4756 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.906146 4756 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.921616 4756 log.go:25] "Validated CRI v1 runtime API"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.965986 4756 log.go:25] "Validated CRI v1 image API"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.969042 4756 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.978347 4756 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-19-26-31-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 30 19:31:10 crc kubenswrapper[4756]: I0930 19:31:10.978418 4756 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.009758 4756 manager.go:217] Machine: {Timestamp:2025-09-30 19:31:11.004820617 +0000 UTC m=+0.625754164 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:9e1f3995-67e3-42b5-a320-7b79274c960e BootID:0611c06e-4fb1-42eb-9eae-fbc2363d05f7 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:87:22:ba Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:87:22:ba Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a3:3c:0b Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:4e:98:dd Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:48:a1:ce Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:61:78:bb Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:9c:64:29 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:de:31:2b:e6:5a:21 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:da:47:a5:aa:a6:31 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.010209 4756 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.010538 4756 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.011036 4756 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.011320 4756 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.011371 4756 container_manager_linux.go:272] "Creating Container Manager object based on Node Config"
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.011761 4756 topology_manager.go:138] "Creating topology manager with none policy" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.011781 4756 container_manager_linux.go:303] "Creating device plugin manager" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.012545 4756 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.012601 4756 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.014651 4756 state_mem.go:36] "Initialized new in-memory state store" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.014792 4756 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.020460 4756 kubelet.go:418] "Attempting to sync node with API server" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.020499 4756 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.020550 4756 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.020575 4756 kubelet.go:324] "Adding apiserver pod source" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.020594 4756 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.025484 4756 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.026596 4756 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
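The nodeConfig entry above records the kubelet's hard eviction thresholds as {Signal, Operator, Value} triples, where each value is either an absolute quantity ("memory.available" < 100Mi) or a percentage ("nodefs.available" < 10%, logged as 0.1). What follows is a minimal, self-contained Go sketch of how threshold strings in the usual kubelet flag form can be parsed into that shape; the type and function names here are illustrative assumptions, not the kubelet's own code.

// evictionsketch.go - a hypothetical parser mirroring the logged
// HardEvictionThresholds shape; not kubelet source.
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// Threshold mirrors the {Signal, Operator, Value{Quantity|Percentage}}
// structure visible in the nodeConfig log entry.
type Threshold struct {
	Signal     string
	Operator   string  // the logged hard thresholds all use LessThan
	Quantity   string  // e.g. "100Mi"; empty when a percentage is used
	Percentage float64 // e.g. 0.1 for "10%"; zero when a quantity is used
}

// parseThreshold turns "memory.available<100Mi" or "nodefs.available<10%"
// into a Threshold. Only the "<" operator is handled, as in the log.
func parseThreshold(s string) (Threshold, error) {
	parts := strings.SplitN(s, "<", 2)
	if len(parts) != 2 {
		return Threshold{}, fmt.Errorf("expected signal<value, got %q", s)
	}
	t := Threshold{Signal: parts[0], Operator: "LessThan"}
	val := parts[1]
	if strings.HasSuffix(val, "%") {
		p, err := strconv.ParseFloat(strings.TrimSuffix(val, "%"), 64)
		if err != nil {
			return Threshold{}, err
		}
		t.Percentage = p / 100 // "10%" -> 0.1, matching the logged form
	} else {
		t.Quantity = val // a resource quantity string such as "100Mi"
	}
	return t, nil
}

func main() {
	// The five hard thresholds recorded in the nodeConfig entry above.
	for _, s := range []string{
		"memory.available<100Mi",
		"nodefs.available<10%",
		"nodefs.inodesFree<5%",
		"imagefs.available<15%",
		"imagefs.inodesFree<5%",
	} {
		t, err := parseThreshold(s)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%+v\n", t)
	}
}

Running the sketch prints one struct per threshold, e.g. {Signal:memory.available Operator:LessThan Quantity:100Mi Percentage:0}, matching the quantity-or-percentage split in the logged nodeConfig.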
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.028614 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.028786 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.028622 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.028911 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.029296 4756 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032075 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032116 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032129 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032144 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032172 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032191 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032211 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032278 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032294 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032323 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032352 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.032365 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.034480 4756 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.035288 4756 server.go:1280] "Started kubelet"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.035532 4756 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.037262 4756 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.035940 4756 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.039691 4756 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 30 19:31:11 crc systemd[1]: Started Kubernetes Kubelet.
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.041223 4756 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.041306 4756 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.043112 4756 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.043475 4756 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.044874 4756 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.045866 4756 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.041843 4756 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 06:04:09.918692458 +0000 UTC
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.048781 4756 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2578h32m58.869926952s for next certificate rotation
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.049238 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.049464 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.050885 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.12:6443: connect: connection refused" interval="200ms"
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.051853 4756 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.12:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a2642645ef59b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 19:31:11.035241883 +0000 UTC m=+0.656175400,LastTimestamp:2025-09-30 19:31:11.035241883 +0000 UTC m=+0.656175400,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.055558 4756 factory.go:55] Registering systemd factory
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.055591 4756 factory.go:221] Registration of the systemd container factory successfully
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056007 4756 factory.go:153] Registering CRI-O factory
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056065 4756 factory.go:221] Registration of the crio container factory successfully
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056251 4756 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056302 4756 factory.go:103] Registering Raw factory
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056336 4756 manager.go:1196] Started watching for new ooms in manager
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.056853 4756 server.go:460] "Adding debug handlers to kubelet server"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.057697 4756 manager.go:319] Starting recovery of all containers
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062767 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062823 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062841 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062853 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062864 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062875 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062886 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062901 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062922 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062962 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062973 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062986 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.062999 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063012 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063022 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063031 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063042 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063052 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063065 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063078 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063093 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063105 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063117 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063128 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063142 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063153 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063169 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063221 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063235 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063290 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063301 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063314 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063328 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063342 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063354 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063366 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063377 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063408 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063421 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063434 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063446 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063459 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063472 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063485 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063501 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063515 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063528 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063543 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063557 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063570 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063584 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063599 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063617 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063630 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063645 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063663 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063675 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063688 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063701 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063714 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063767 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063780 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063791 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063806 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063818 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063830 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063841 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063854 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063866 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063899 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063915 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063927 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063938 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063957 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063969 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063980 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.063994 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064006 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064017 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064028 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064045 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064057 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064068 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064079 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064091 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064103 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064115 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064127 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064144 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064160 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064176 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064190 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064226 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064243 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064255 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064272 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064288 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064307 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064320 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064333 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064344 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064356 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064371 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064384 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064419 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064433 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064446 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064459 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064475 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064489 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064503 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064559 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064584 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064599 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.064617 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.067987 4756 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068014 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068025 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068037 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068055 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068066 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068077 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068086 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068095 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068104 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068114 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068123 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068134 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068152 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068164 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068174 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068187 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068217 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068229 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068242 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068251 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068265 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068274 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068286 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068295 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068308 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068319 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068330 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068342 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068356 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068367 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068452 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068464 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068473 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512"
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068483 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068522 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068532 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.068541 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069015 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069064 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069094 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069168 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069197 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069216 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069247 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069274 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069291 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069302 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069322 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069347 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069360 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069378 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069461 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069480 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069491 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069543 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069562 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069576 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069595 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069654 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069670 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069705 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069718 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069734 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069746 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069773 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069793 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069808 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069824 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069862 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069933 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069954 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069965 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069976 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.069992 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070016 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070095 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070106 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070116 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070196 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070210 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070230 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070256 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070268 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070411 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070422 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070523 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070583 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070594 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070609 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070619 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070633 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070757 4756 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070904 4756 reconstruct.go:97] "Volume reconstruction finished" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.070912 4756 reconciler.go:26] "Reconciler: start to sync state" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.092483 4756 manager.go:324] Recovery completed Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.105969 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.109638 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.109690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.109706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.111714 4756 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.112043 4756 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.112102 4756 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.112124 4756 state_mem.go:36] "Initialized new in-memory state store" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.114374 4756 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.114426 4756 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.114455 4756 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.114800 4756 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.116735 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.116802 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.132505 4756 policy_none.go:49] "None policy: Start" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.133604 4756 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.133652 4756 state_mem.go:35] "Initializing new in-memory state store" Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.146014 4756 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.195029 4756 manager.go:334] "Starting Device Plugin manager" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.195111 4756 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.195131 4756 server.go:79] "Starting device plugin registration server" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.195781 4756 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.195809 4756 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.196480 4756 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.196646 4756 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.196672 4756 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.204546 4756 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.215731 4756 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 19:31:11 crc kubenswrapper[4756]: 
I0930 19:31:11.216553 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218139 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218191 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218206 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218462 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218693 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.218763 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.219741 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.219776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.219788 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.219954 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220257 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220343 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220667 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220710 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220726 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220899 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.220975 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221184 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221370 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221451 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221657 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221701 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.221720 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224308 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224320 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224503 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224542 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224559 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224747 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224876 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.224916 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.225675 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.225696 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.225707 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.225879 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.225918 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.226737 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.226799 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.226819 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.227570 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.227607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.227620 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.252026 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.12:6443: connect: connection refused" interval="400ms" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.272795 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.272893 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.272947 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.272995 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273047 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 
19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273091 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273174 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273263 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273571 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273715 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273783 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273815 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273851 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.273891 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.274021 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.298645 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.300421 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.300471 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.300486 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.300524 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.301174 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.12:6443: connect: connection refused" node="crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376030 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376103 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376171 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376190 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376208 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 
19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376229 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376232 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376252 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376276 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376323 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376354 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376354 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376364 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376441 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376466 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376409 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376387 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376332 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376522 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376430 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376377 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376453 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376572 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376566 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376556 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376580 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376675 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.376659 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.501915 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.503786 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.503873 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.503903 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.503957 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.504742 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.12:6443: connect: connection refused" node="crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.549698 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.568184 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.577511 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.582463 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.604295 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2d5a1419b84ab5d0f181e40a2c9330d2adb795969b926f72b8ae40082c7cb9a7 WatchSource:0}: Error finding container 2d5a1419b84ab5d0f181e40a2c9330d2adb795969b926f72b8ae40082c7cb9a7: Status 404 returned error can't find the container with id 2d5a1419b84ab5d0f181e40a2c9330d2adb795969b926f72b8ae40082c7cb9a7
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.607482 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-455c7949d7049d5cdec2617fefddfd538f84b155043415d31d1ce8bbb5001c10 WatchSource:0}: Error finding container 455c7949d7049d5cdec2617fefddfd538f84b155043415d31d1ce8bbb5001c10: Status 404 returned error can't find the container with id 455c7949d7049d5cdec2617fefddfd538f84b155043415d31d1ce8bbb5001c10
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.612009 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.616217 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-7c0ae4720f35e792cecaf3afb45cb3be8f794784ee7c43616740c2e1f0d6f1df WatchSource:0}: Error finding container 7c0ae4720f35e792cecaf3afb45cb3be8f794784ee7c43616740c2e1f0d6f1df: Status 404 returned error can't find the container with id 7c0ae4720f35e792cecaf3afb45cb3be8f794784ee7c43616740c2e1f0d6f1df
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.619664 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c2bdec0c3769fef9651c603375db8d642cc25b290ec2964652eec8929ef41dc0 WatchSource:0}: Error finding container c2bdec0c3769fef9651c603375db8d642cc25b290ec2964652eec8929ef41dc0: Status 404 returned error can't find the container with id c2bdec0c3769fef9651c603375db8d642cc25b290ec2964652eec8929ef41dc0
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.635980 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7dc79082f12c74b73190dc54d237f2c6974d449f2b83583f175692e6a3ab1bf6 WatchSource:0}: Error finding container 7dc79082f12c74b73190dc54d237f2c6974d449f2b83583f175692e6a3ab1bf6: Status 404 returned error can't find the container with id 7dc79082f12c74b73190dc54d237f2c6974d449f2b83583f175692e6a3ab1bf6
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.653130 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.12:6443: connect: connection refused" interval="800ms"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.905476 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.908581 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.908667 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.908707 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:11 crc kubenswrapper[4756]: I0930 19:31:11.908753 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.909680 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.12:6443: connect: connection refused" node="crc"
Sep 30 19:31:11 crc kubenswrapper[4756]: W0930 19:31:11.933009 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:11 crc kubenswrapper[4756]: E0930 19:31:11.933185 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.038902 4756 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:12 crc kubenswrapper[4756]: W0930 19:31:12.109970 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:12 crc kubenswrapper[4756]: E0930 19:31:12.110114 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.120364 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7c0ae4720f35e792cecaf3afb45cb3be8f794784ee7c43616740c2e1f0d6f1df"}
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.122451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"455c7949d7049d5cdec2617fefddfd538f84b155043415d31d1ce8bbb5001c10"}
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.124522 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2d5a1419b84ab5d0f181e40a2c9330d2adb795969b926f72b8ae40082c7cb9a7"}
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.127274 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7dc79082f12c74b73190dc54d237f2c6974d449f2b83583f175692e6a3ab1bf6"}
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.128611 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c2bdec0c3769fef9651c603375db8d642cc25b290ec2964652eec8929ef41dc0"}
Sep 30 19:31:12 crc kubenswrapper[4756]: W0930 19:31:12.169789 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:12 crc kubenswrapper[4756]: E0930 19:31:12.169930 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:12 crc kubenswrapper[4756]: E0930 19:31:12.454537 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.12:6443: connect: connection refused" interval="1.6s"
Sep 30 19:31:12 crc kubenswrapper[4756]: W0930 19:31:12.630881 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:12 crc kubenswrapper[4756]: E0930 19:31:12.631026 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.710464 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.712370 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.712486 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.712516 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:12 crc kubenswrapper[4756]: I0930 19:31:12.712550 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 19:31:12 crc kubenswrapper[4756]: E0930 19:31:12.713779 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.12:6443: connect: connection refused" node="crc"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.038667 4756 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.135875 4756 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07" exitCode=0
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.136010 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.136116 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.138380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.138478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.138506 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.139584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.139643 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.139642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.139747 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.139762 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.140897 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.140929 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.140942 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.143463 4756 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5" exitCode=0
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.143537 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.143578 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.144650 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.144682 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.144695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.146002 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.146831 4756 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a" exitCode=0
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.146878 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.146997 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.147590 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.147641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.147654 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.148239 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.148283 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.148298 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.149517 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221"}
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.149706 4756 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221" exitCode=0
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.149802 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.151696 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.151728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:13 crc kubenswrapper[4756]: I0930 19:31:13.151742 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:13 crc kubenswrapper[4756]: E0930 19:31:13.337446 4756 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.12:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a2642645ef59b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 19:31:11.035241883 +0000 UTC m=+0.656175400,LastTimestamp:2025-09-30 19:31:11.035241883 +0000 UTC m=+0.656175400,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.039149 4756 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:14 crc kubenswrapper[4756]: E0930 19:31:14.056346 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.12:6443: connect: connection refused" interval="3.2s"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.155382 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.155451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.155462 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.155472 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.159110 4756 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840" exitCode=0
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.159291 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.159449 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.161346 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.161564 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.161695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.165382 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"38fdd34c9534d8b27068f856bae35d51ce2f284f03aa5176b20e82f0dfe82abe"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.165474 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.166750 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.166805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.166818 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.172385 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.172451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.172462 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.172470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65"}
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.172496 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.173953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.174001 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.174014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.174243 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.174292 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.174318 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.313908 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.315196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.315231 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.315241 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:14 crc kubenswrapper[4756]: I0930 19:31:14.315270 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 19:31:14 crc kubenswrapper[4756]: E0930 19:31:14.315800 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.12:6443: connect: connection refused" node="crc"
Sep 30 19:31:14 crc kubenswrapper[4756]: W0930 19:31:14.577112 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.12:6443: connect: connection refused
Sep 30 19:31:14 crc kubenswrapper[4756]: E0930 19:31:14.577231 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.12:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.180564 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c"}
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.180757 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.182287 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.182359 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.182386 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.183894 4756 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52" exitCode=0
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.183962 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52"}
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.184037 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.184105 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.184113 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.184585 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.185972 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.186025 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.185983 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.186081 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.186106 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.186128 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.187626 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.187674 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:15 crc kubenswrapper[4756]: I0930 19:31:15.187699 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191218 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722"}
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191279 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7"}
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191294 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b"}
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191306 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2"}
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191345 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191383 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.191421 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.192629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.192687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.192714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.193198 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.193245 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.193263 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:16 crc kubenswrapper[4756]: I0930 19:31:16.926941 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.200931 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97"}
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.201006 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.200966 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.202796 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.202852 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.202874 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.203143 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.203190 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.203212 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.516137 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.517953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.518000 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.518010 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:17 crc kubenswrapper[4756]: I0930 19:31:17.518036 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.204048 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.204861 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.205917 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.206017 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.206077 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.206457 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.206522 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.206540 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.709289 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.709608 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.711377 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.711455 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:18 crc kubenswrapper[4756]: I0930 19:31:18.711473 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:19 crc kubenswrapper[4756]: I0930 19:31:19.582007 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:19 crc kubenswrapper[4756]: I0930 19:31:19.582302 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:19 crc kubenswrapper[4756]: I0930 19:31:19.583976 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:19 crc kubenswrapper[4756]: I0930 19:31:19.584032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:19 crc kubenswrapper[4756]: I0930 19:31:19.584050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.145256 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.145448 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.146829 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.146857 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.146871 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.151355 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.209654 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.210713 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.210763 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:20 crc kubenswrapper[4756]: I0930 19:31:20.210786 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:21 crc kubenswrapper[4756]: I0930 19:31:21.059534 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Sep 30 19:31:21 crc kubenswrapper[4756]: I0930 19:31:21.059751 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:21 crc kubenswrapper[4756]: I0930 19:31:21.062355 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:21 crc kubenswrapper[4756]: I0930 19:31:21.062445 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:21 crc kubenswrapper[4756]: I0930 19:31:21.062460 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:21 crc kubenswrapper[4756]: E0930 19:31:21.204726 4756 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.812514 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.813222 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.815477 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.815534 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.815549 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.823390 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:22 crc kubenswrapper[4756]: I0930 19:31:22.984391 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.219653 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.220835 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.220885 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.220902 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.451287 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.451573 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.453154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.453196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:23 crc kubenswrapper[4756]: I0930 19:31:23.453211 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.222275 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.223751 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.223856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.223887 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:24 crc kubenswrapper[4756]: W0930 19:31:24.743266 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.743449 4756 trace.go:236] Trace[1552911775]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:31:14.741) (total time: 10001ms):
Sep 30 19:31:24 crc kubenswrapper[4756]: Trace[1552911775]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:31:24.743)
Sep 30 19:31:24 crc kubenswrapper[4756]: Trace[1552911775]: [10.001489719s] [10.001489719s] END
Sep 30 19:31:24 crc kubenswrapper[4756]: E0930 19:31:24.743504 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 19:31:24 crc kubenswrapper[4756]: W0930 19:31:24.928094 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 19:31:24 crc kubenswrapper[4756]: I0930 19:31:24.928274 4756 trace.go:236] Trace[1799890690]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:31:14.926) (total time: 10001ms):
Sep 30 19:31:24 crc kubenswrapper[4756]: Trace[1799890690]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:31:24.928)
Sep 30 19:31:24 crc kubenswrapper[4756]: Trace[1799890690]: [10.00155797s] [10.00155797s] END
Sep 30 19:31:24 crc kubenswrapper[4756]: E0930 19:31:24.928320 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.039504 4756 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.227806 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 30 19:31:25 crc kubenswrapper[4756]: W0930 19:31:25.229089 4756 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.229269 4756 trace.go:236] Trace[1456768393]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:31:15.227) (total time: 10001ms):
Sep 30 19:31:25 crc kubenswrapper[4756]: Trace[1456768393]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:31:25.229)
Sep 30 19:31:25 crc kubenswrapper[4756]: Trace[1456768393]: [10.001492909s] [10.001492909s] END
Sep 30 19:31:25 crc kubenswrapper[4756]: E0930 19:31:25.229320 4756 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.230858 4756 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c" exitCode=255
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.230931 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c"}
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.231867 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.233286 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.233356 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.233374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.234282 4756 scope.go:117] "RemoveContainer" containerID="8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.510302 4756 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.510419 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.519432 4756 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.519524 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.766242 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.813466 4756 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 30 19:31:25 crc kubenswrapper[4756]: I0930 19:31:25.813553 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.237747 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.240068 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786"}
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.240226 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.241196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.241234 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:26 crc kubenswrapper[4756]: I0930 19:31:26.241245 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:27 crc kubenswrapper[4756]: I0930 19:31:27.243161 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:27 crc kubenswrapper[4756]: I0930 19:31:27.243299 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:27 crc kubenswrapper[4756]: I0930 19:31:27.244645 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:27 crc kubenswrapper[4756]: I0930 19:31:27.244714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:27 crc kubenswrapper[4756]: I0930 19:31:27.244740 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.247076 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.248747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.248793 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.248805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.350377 4756 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 30 19:31:28 crc kubenswrapper[4756]: I0930 19:31:28.995077 4756 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.585777 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.585935 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.587354 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.587419 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.587432 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:29 crc kubenswrapper[4756]: I0930 19:31:29.592833 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.253334 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.254172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.254217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.254233 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.421021 4756 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 30 19:31:30 crc kubenswrapper[4756]: E0930 19:31:30.517476 4756 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.520828 4756 trace.go:236] Trace[1767350417]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:31:18.644) (total time: 11875ms):
Sep 30 19:31:30 crc kubenswrapper[4756]: Trace[1767350417]: ---"Objects listed" error: 11875ms (19:31:30.520)
Sep 30 19:31:30 crc kubenswrapper[4756]: Trace[1767350417]: [11.875781494s] [11.875781494s] END
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.520867 4756 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 30 19:31:30 crc kubenswrapper[4756]: I0930 19:31:30.521074 4756 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 30 19:31:30 crc kubenswrapper[4756]: E0930 19:31:30.523037 4756 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.031795 4756 apiserver.go:52] "Watching apiserver"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034011 4756 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034305 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034805 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034832 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034880 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034884 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.034922 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.034939 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.034957 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.035015 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.035078 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.037767 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.037795 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.037856 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.037956 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.038772 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.038869 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.039691 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.039735 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.039993 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.046730 4756 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.077348 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.077348 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.095681 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.119883 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.124908 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.125647 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.125677 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126438 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.125973 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126155 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126352 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126535 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126553 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126719 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126907 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.127674 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.127967 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126820 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126845 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.126861 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.127119 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.127612 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.127900 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128082 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128103 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128289 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128474 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128718 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128776 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129136 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129373 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.128810 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129469 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129491 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129512 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129530 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod 
\"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129599 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129623 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129643 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129662 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129681 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130201 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.129870 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130189 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130243 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130226 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130330 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130348 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130364 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130367 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130412 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130439 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130464 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130467 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130485 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130504 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130524 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130547 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130567 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130589 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130608 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130631 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130652 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130670 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 
30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130688 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130736 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130755 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130775 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130823 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130845 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130867 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130885 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130906 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130925 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130943 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130960 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130978 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131020 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131049 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131071 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131092 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131114 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131132 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131150 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131166 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131187 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131202 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131218 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130587 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130729 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130794 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130827 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130847 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130846 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.130998 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131024 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131094 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131221 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131233 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131353 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131388 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131430 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131454 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131478 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131501 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131524 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131543 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131565 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod 
\"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131584 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131605 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131629 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131650 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131675 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131696 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131715 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131740 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131760 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131779 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131899 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131922 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131945 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131966 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131987 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132008 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132027 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132048 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132069 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132088 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132110 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132129 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132155 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132178 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132199 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132218 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132238 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132256 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132283 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132304 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132340 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132358 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132375 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132417 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132437 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132460 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132481 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132501 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132520 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: 
I0930 19:31:31.132540 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132587 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132607 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132630 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132651 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132672 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132691 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132711 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132732 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132752 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132770 4756 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132789 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132808 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132830 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132851 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132869 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132888 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132911 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132932 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132956 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc 
kubenswrapper[4756]: I0930 19:31:31.132978 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133000 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133020 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133043 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133064 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133087 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133109 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133130 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133151 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133174 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133197 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133222 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133242 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133265 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133291 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133312 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133331 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133355 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133377 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134240 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134276 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134303 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134324 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134343 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134364 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134407 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134433 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134455 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134476 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134499 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134531 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134552 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134571 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134596 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134632 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134652 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134672 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134692 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134712 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134732 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134753 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134772 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134792 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134814 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134834 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134854 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134874 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134894 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134916 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134939 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134959 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.134982 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135006 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135026 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135046 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135066 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135095 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135117 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135140 4756 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135163 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135182 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135236 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135266 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135294 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135324 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135348 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135375 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135480 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135511 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135535 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135557 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135582 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135603 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135626 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135653 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135742 4756 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135759 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135773 4756 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135788 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135804 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135818 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135828 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135840 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135850 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135862 4756 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135880 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135894 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135909 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135922 4756 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135934 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135945 4756 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135956 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135966 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135977 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.135988 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136001 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136013 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136024 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136037 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136052 4756 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136065 4756 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136076 4756 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" 
(UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136088 4756 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136100 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.136111 4756 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131355 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131432 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131549 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131651 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131678 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.137885 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131824 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.131964 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132054 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132125 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132249 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132292 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132288 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132326 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132536 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132633 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.132899 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133149 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133161 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133166 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133204 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.133403 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.137543 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.137824 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138130 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138331 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138344 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138548 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138566 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138580 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138784 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.138791 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.139055 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.139332 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.139624 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.139682 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.139766 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.140014 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.140077 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.140113 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.140245 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.141968 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.142590 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.142757 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.143067 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.143485 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.145052 4756 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.146594 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.146829 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.147499 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.152033 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.152656 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.152961 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.153186 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.156341 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.157955 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.158031 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.158303 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.158796 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.158971 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159156 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159301 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159285 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159539 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159914 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.159892 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.160046 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.160585 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.160650 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.160835 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.160932 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.161652 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.161850 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.162741 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.162987 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163010 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163030 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163073 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163156 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163240 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163445 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163455 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163496 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163527 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.163961 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.164359 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.164471 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:31.664441494 +0000 UTC m=+21.285374981 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.165564 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.166269 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.166496 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.166669 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.166729 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167108 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167285 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167359 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167719 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167733 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167928 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.167952 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168209 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168220 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168235 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168488 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168580 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168590 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168658 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.168792 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169064 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169110 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169184 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169384 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169527 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169686 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169814 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.169949 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.170186 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.170650 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.171783 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.172504 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.172524 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.173297 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.173886 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.176948 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.176982 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.177299 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.177383 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.178490 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.178530 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.178737 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.178893 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.179031 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.179474 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.179537 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:31.672595681 +0000 UTC m=+21.293529158 (durationBeforeRetry 500ms). 
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.179668 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.180440 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.180471 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.180584 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.180597 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.180621 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.180644 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.180724 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:31.680701616 +0000 UTC m=+21.301635103 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181073 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181145 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181215 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181216 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181344 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.181358 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:31:31.681339802 +0000 UTC m=+21.302273279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181490 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181506 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181981 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.181999 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182359 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182378 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182609 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182814 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182837 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.182966 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.190660 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.190855 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.191260 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.191740 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.192081 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.192716 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.195542 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.195671 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.195840 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.195973 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.196201 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.196852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.196930 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.197328 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.200584 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.204741 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.204772 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.204786 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.204841 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:31.704822128 +0000 UTC m=+21.325755605 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.204843 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.205013 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.205131 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.206424 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.209098 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.209815 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.209545 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.212964 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.212241 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.219999 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.235436 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237073 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237555 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237584 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237681 4756 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237693 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237703 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237712 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237721 4756 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237731 4756 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237742 4756 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237753 4756 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237762 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237771 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237779 4756 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237788 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237796 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237805 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237815 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237825 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237835 4756 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237846 4756 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.237941 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238043 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238295 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238313 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238326 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238337 4756 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238347 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238357 4756 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238366 4756 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238376 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238387 4756 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238413 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238423 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238433 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238443 4756 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238453 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238462 4756 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238471 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238483 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238493 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238503 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238513 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238523 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238533 4756 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238543 4756 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238551 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238561 4756 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238570 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238580 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238590 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238621 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238639 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238843 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238860 4756 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238871 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238882 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238891 4756 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238902 4756 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238912 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238923 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238932 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238941 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238950 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238960 4756 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238970 4756 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238980 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238989 4756 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.238998 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239007 4756 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239016 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239026 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239035 4756 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239044 4756 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239139 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239152 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239183 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239195 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239223 4756 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239234 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239264 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: 
\"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239290 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239330 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239341 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239426 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239485 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239497 4756 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239556 4756 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239567 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239576 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239586 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239614 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239696 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239735 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: 
\"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239759 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239784 4756 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239793 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239802 4756 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239811 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239821 4756 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239832 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239843 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239853 4756 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239863 4756 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239873 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239900 4756 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239911 4756 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239921 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239931 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239942 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239952 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239968 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239979 4756 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239989 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.239998 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240009 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240020 4756 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240029 4756 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240039 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240050 4756 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240060 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240069 4756 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240079 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240089 4756 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240099 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240110 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240121 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240131 4756 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240140 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240150 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240161 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240172 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240180 4756 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240189 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240202 4756 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240211 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240220 4756 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240230 4756 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240240 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240250 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240258 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240266 4756 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240275 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240285 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240295 4756 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240304 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240314 4756 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240323 4756 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240333 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240343 4756 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240352 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240361 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240369 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240378 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240386 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240430 4756 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240440 4756 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240449 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240458 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240468 4756 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240477 4756 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240487 4756 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240496 4756 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240505 4756 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240513 4756 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240524 4756 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240535 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.240320 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.243287 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.249623 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.261526 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.273072 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.285624 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.297792 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.315729 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.335650 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.340936 4756 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.340969 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.352880 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:31:31 crc kubenswrapper[4756]: W0930 19:31:31.364882 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-b754ce38bf8681c1a810fbfaa5532f7fa5de347b46d6ee006b5ccd7256ec557b WatchSource:0}: Error finding container b754ce38bf8681c1a810fbfaa5532f7fa5de347b46d6ee006b5ccd7256ec557b: Status 404 returned error can't find the container with id b754ce38bf8681c1a810fbfaa5532f7fa5de347b46d6ee006b5ccd7256ec557b Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.368466 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.378646 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:31:31 crc kubenswrapper[4756]: W0930 19:31:31.381705 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-080268ffeb3510885ec962f0d5f0f2827219d286bd0bc036d267c814f1f3397a WatchSource:0}: Error finding container 080268ffeb3510885ec962f0d5f0f2827219d286bd0bc036d267c814f1f3397a: Status 404 returned error can't find the container with id 080268ffeb3510885ec962f0d5f0f2827219d286bd0bc036d267c814f1f3397a Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.744978 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.745052 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.745085 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.745123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:31 crc kubenswrapper[4756]: I0930 19:31:31.745149 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745261 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745279 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745333 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745363 
4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745364 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:31:32.745168204 +0000 UTC m=+22.366101681 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745446 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:32.74543488 +0000 UTC m=+22.366368357 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745482 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:32.745476021 +0000 UTC m=+22.366409498 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745336 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745511 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745523 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745570 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:32.745550463 +0000 UTC m=+22.366484110 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745378 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:31 crc kubenswrapper[4756]: E0930 19:31:31.745619 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:32.745612324 +0000 UTC m=+22.366545801 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.123884 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-ghp5t"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.124245 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.124938 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-4n9zj"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.125409 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.126517 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.129639 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.130248 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.130535 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.130788 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.131057 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.131419 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.134794 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.148420 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.171499 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.187561 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.198075 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.213667 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.224303 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.236064 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249331 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-proxy-tls\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249377 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249429 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/32ba8a0a-215c-415b-a893-74f7a40e3c20-hosts-file\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249470 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pslwb\" (UniqueName: \"kubernetes.io/projected/32ba8a0a-215c-415b-a893-74f7a40e3c20-kube-api-access-pslwb\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249492 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pstrc\" (UniqueName: \"kubernetes.io/projected/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-kube-api-access-pstrc\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.249553 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-rootfs\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.250911 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.259727 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.259788 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6ca3510c27f3be2d59b52d5faf8f09c9c47587d8f3a1652741ce2e0728faf446"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.261046 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"080268ffeb3510885ec962f0d5f0f2827219d286bd0bc036d267c814f1f3397a"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.262974 4756 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.263017 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.263029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b754ce38bf8681c1a810fbfaa5532f7fa5de347b46d6ee006b5ccd7256ec557b"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.265190 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.265766 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.267110 4756 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786" exitCode=255 Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.267151 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786"} Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.267210 4756 scope.go:117] "RemoveContainer" containerID="8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.268798 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.282180 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.295501 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.308191 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.320730 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.332860 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.347731 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350741 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pslwb\" (UniqueName: \"kubernetes.io/projected/32ba8a0a-215c-415b-a893-74f7a40e3c20-kube-api-access-pslwb\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350794 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pstrc\" (UniqueName: \"kubernetes.io/projected/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-kube-api-access-pstrc\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350831 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-rootfs\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350864 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-proxy-tls\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350889 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.350968 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/32ba8a0a-215c-415b-a893-74f7a40e3c20-hosts-file\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.351354 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/32ba8a0a-215c-415b-a893-74f7a40e3c20-hosts-file\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.351446 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-rootfs\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.353812 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.356931 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-proxy-tls\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.367797 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.371790 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pslwb\" (UniqueName: \"kubernetes.io/projected/32ba8a0a-215c-415b-a893-74f7a40e3c20-kube-api-access-pslwb\") pod \"node-resolver-ghp5t\" (UID: \"32ba8a0a-215c-415b-a893-74f7a40e3c20\") " pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.372224 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pstrc\" (UniqueName: \"kubernetes.io/projected/3370c2ca-fec3-4f90-8df7-51e21e6c7e1c-kube-api-access-pstrc\") pod \"machine-config-daemon-4n9zj\" (UID: \"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\") " pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.381272 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.392101 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.407805 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.420529 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.434607 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.438972 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.439123 4756 scope.go:117] "RemoveContainer" containerID="a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.439324 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.442872 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-ghp5t" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.448100 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.458096 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: W0930 19:31:32.461680 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3370c2ca_fec3_4f90_8df7_51e21e6c7e1c.slice/crio-b5a48b46005813b4949cb13b53196850a282973e2628bf78eae8ab9d5883858c WatchSource:0}: Error finding container b5a48b46005813b4949cb13b53196850a282973e2628bf78eae8ab9d5883858c: Status 404 returned error can't find the container with id b5a48b46005813b4949cb13b53196850a282973e2628bf78eae8ab9d5883858c Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.483383 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.523518 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-m6zsp"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.524230 4756 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-multus/multus-l9mw5"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.524510 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.524504 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.527726 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.530233 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.530523 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.530649 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.530764 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.534011 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.535649 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.577234 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.614847 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.639168 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653238 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-system-cni-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653286 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhq89\" (UniqueName: \"kubernetes.io/projected/104562fb-2a2d-4291-963c-7a95062ec13a-kube-api-access-mhq89\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653307 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653352 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-cni-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653368 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-kubelet\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653419 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-cni-binary-copy\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653448 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653466 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-k8s-cni-cncf-io\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653481 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-etc-kubernetes\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653494 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-bin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-multus-daemon-config\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653533 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cnibin\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-system-cni-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653564 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-os-release\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653580 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvl9h\" (UniqueName: \"kubernetes.io/projected/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-kube-api-access-jvl9h\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653597 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-cnibin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653615 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-multus\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653636 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653655 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-hostroot\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653671 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-socket-dir-parent\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653691 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-conf-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 
19:31:32.653705 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-os-release\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653720 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-netns\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.653735 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-multus-certs\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.661864 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.691954 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.708454 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:24Z\\\",\\\"message\\\":\\\"W0930 19:31:14.261606 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 19:31:14.262005 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759260674 cert, and key in /tmp/serving-cert-4044505907/serving-signer.crt, /tmp/serving-cert-4044505907/serving-signer.key\\\\nI0930 19:31:14.657086 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:31:14.660923 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:31:14.661186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:31:14.662354 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4044505907/tls.crt::/tmp/serving-cert-4044505907/tls.key\\\\\\\"\\\\nF0930 19:31:24.928649 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints 
registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.721777 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.735191 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.748699 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.754255 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.754521 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:31:34.75446926 +0000 UTC m=+24.375402747 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.754626 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.754694 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.754982 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-hostroot\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.754904 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.755059 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.755077 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755091 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-hostroot\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.755148 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:34.755136707 +0000 UTC m=+24.376070184 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755254 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-socket-dir-parent\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755012 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-socket-dir-parent\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755463 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-conf-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755505 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-conf-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755617 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-os-release\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755679 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755755 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-netns\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755786 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-multus-certs\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755919 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-system-cni-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755950 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.755997 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-netns\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756055 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-os-release\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756078 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-cni-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756038 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-multus-certs\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756163 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-kubelet\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756196 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhq89\" (UniqueName: \"kubernetes.io/projected/104562fb-2a2d-4291-963c-7a95062ec13a-kube-api-access-mhq89\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756233 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756252 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-multus-cni-dir\") pod \"multus-l9mw5\" (UID: 
\"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756199 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-system-cni-dir\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756330 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-kubelet\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756255 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756358 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756409 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-cni-binary-copy\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756418 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:34.75638552 +0000 UTC m=+24.377318987 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756503 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756435 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756580 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-k8s-cni-cncf-io\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756619 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:34.756595065 +0000 UTC m=+24.377528542 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756625 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-run-k8s-cni-cncf-io\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756655 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-etc-kubernetes\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756709 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-bin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756738 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-multus-daemon-config\") pod \"multus-l9mw5\" (UID: 
\"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756764 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-etc-kubernetes\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756782 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-bin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756775 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756880 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756900 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756914 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756935 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cnibin\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: E0930 19:31:32.756969 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:34.756950824 +0000 UTC m=+24.377884301 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756991 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-system-cni-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757017 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-os-release\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757019 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-cni-binary-copy\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757036 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-system-cni-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.756991 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cnibin\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757039 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvl9h\" (UniqueName: \"kubernetes.io/projected/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-kube-api-access-jvl9h\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757077 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-cnibin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757104 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-multus\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc 
kubenswrapper[4756]: I0930 19:31:32.757106 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-os-release\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757211 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-cnibin\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757223 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/104562fb-2a2d-4291-963c-7a95062ec13a-host-var-lib-cni-multus\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757409 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757635 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.757808 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/104562fb-2a2d-4291-963c-7a95062ec13a-multus-daemon-config\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.764885 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.778172 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvl9h\" (UniqueName: \"kubernetes.io/projected/2b4cba15-5ae1-4c98-a5e5-060b3af3aec6-kube-api-access-jvl9h\") pod \"multus-additional-cni-plugins-m6zsp\" (UID: \"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\") " pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.779814 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.781747 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhq89\" (UniqueName: \"kubernetes.io/projected/104562fb-2a2d-4291-963c-7a95062ec13a-kube-api-access-mhq89\") pod \"multus-l9mw5\" (UID: \"104562fb-2a2d-4291-963c-7a95062ec13a\") " pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.795490 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.807572 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.818318 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.821006 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.823215 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.827971 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.835207 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.843336 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-l9mw5" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.850128 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.852239 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" Sep 30 19:31:32 crc kubenswrapper[4756]: W0930 19:31:32.854636 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod104562fb_2a2d_4291_963c_7a95062ec13a.slice/crio-da692c22fb01dda2353912c9f59ef80728765f2bafbb2e2acdd0bda531c73890 WatchSource:0}: Error finding container da692c22fb01dda2353912c9f59ef80728765f2bafbb2e2acdd0bda531c73890: Status 404 returned error can't find the container with id da692c22fb01dda2353912c9f59ef80728765f2bafbb2e2acdd0bda531c73890 Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.861973 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: W0930 19:31:32.862144 4756 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b4cba15_5ae1_4c98_a5e5_060b3af3aec6.slice/crio-a7dae2ba8d79a9a6a996471a5946364549ccb57be4f791b9559fbcc69454a0b8 WatchSource:0}: Error finding container a7dae2ba8d79a9a6a996471a5946364549ccb57be4f791b9559fbcc69454a0b8: Status 404 returned error can't find the container with id a7dae2ba8d79a9a6a996471a5946364549ccb57be4f791b9559fbcc69454a0b8 Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.884176 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.901951 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.918147 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:24Z\\\",\\\"message\\\":\\\"W0930 19:31:14.261606 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 19:31:14.262005 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759260674 cert, and key in /tmp/serving-cert-4044505907/serving-signer.crt, /tmp/serving-cert-4044505907/serving-signer.key\\\\nI0930 19:31:14.657086 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:31:14.660923 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:31:14.661186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:31:14.662354 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4044505907/tls.crt::/tmp/serving-cert-4044505907/tls.key\\\\\\\"\\\\nF0930 19:31:24.928649 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 
configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 
2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.938225 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.954657 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.970095 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.973877 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kmxk8"] Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.976001 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980569 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980572 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980674 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980793 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980881 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980952 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.980312 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 19:31:32 crc kubenswrapper[4756]: I0930 19:31:32.990547 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.013586 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.027057 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.038551 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.052591 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059565 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059588 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059607 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059635 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059652 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059672 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059694 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059716 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059733 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059751 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059770 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059851 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059897 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059963 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.059988 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.060014 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.060039 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.060066 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bwg2\" (UniqueName: \"kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.060100 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.067099 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:24Z\\\",\\\"message\\\":\\\"W0930 19:31:14.261606 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:31:14.262005 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759260674 cert, and key in /tmp/serving-cert-4044505907/serving-signer.crt, /tmp/serving-cert-4044505907/serving-signer.key\\\\nI0930 19:31:14.657086 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:31:14.660923 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:31:14.661186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:31:14.662354 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4044505907/tls.crt::/tmp/serving-cert-4044505907/tls.key\\\\\\\"\\\\nF0930 19:31:24.928649 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" 
enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.080869 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.098274 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.111314 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.115035 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:33 crc kubenswrapper[4756]: E0930 19:31:33.115162 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.115176 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.115224 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:33 crc kubenswrapper[4756]: E0930 19:31:33.115484 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:33 crc kubenswrapper[4756]: E0930 19:31:33.115651 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.119859 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.120576 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.121699 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.122588 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.123454 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.124198 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.125183 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.127078 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.127644 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.128456 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.129897 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.130627 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.134855 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.135656 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.136217 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 
19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.136761 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.137329 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.137906 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.138346 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.138948 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.140114 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.140310 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.140836 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.141441 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.141875 
4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.142553 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.142956 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.143574 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.144244 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.144744 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.145385 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.145909 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.146387 4756 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.146512 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.149284 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.150268 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.150880 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.152327 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.152440 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.153323 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.154063 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.154967 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.155835 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.156505 4756 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.157356 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.160893 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161057 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161112 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161195 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161202 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161254 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161222 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161202 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161296 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161369 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161388 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161388 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161431 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161438 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161449 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161478 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161496 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161503 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161526 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161543 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161567 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161585 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161603 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161621 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161633 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161656 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161660 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161673 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161689 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161697 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161720 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bwg2\" (UniqueName: \"kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161746 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161753 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161859 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.161721 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.162330 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.162442 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.162626 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.162879 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.163359 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.164641 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.165290 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.165429 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.166404 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.166777 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.167657 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.169283 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.169842 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.170707 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.172073 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.172949 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.188215 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bwg2\" (UniqueName: \"kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2\") pod \"ovnkube-node-kmxk8\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.189413 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.205492 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni
/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.224733 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.236695 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.272295 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.272372 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.272388 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"b5a48b46005813b4949cb13b53196850a282973e2628bf78eae8ab9d5883858c"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.274118 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ghp5t" event={"ID":"32ba8a0a-215c-415b-a893-74f7a40e3c20","Type":"ContainerStarted","Data":"a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.274211 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ghp5t" event={"ID":"32ba8a0a-215c-415b-a893-74f7a40e3c20","Type":"ContainerStarted","Data":"39f9221113eecabe6bdf6d1acdc45fb6c491ba0c83abbaf6a64fe7159c3b29ab"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.276512 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.278959 4756 scope.go:117] "RemoveContainer" containerID="a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786" Sep 30 19:31:33 crc kubenswrapper[4756]: E0930 19:31:33.279121 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.280815 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerStarted","Data":"828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.280865 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerStarted","Data":"a7dae2ba8d79a9a6a996471a5946364549ccb57be4f791b9559fbcc69454a0b8"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.282535 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerStarted","Data":"c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.282608 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerStarted","Data":"da692c22fb01dda2353912c9f59ef80728765f2bafbb2e2acdd0bda531c73890"} Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.286607 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: E0930 19:31:33.292683 4756 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.293228 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:33 crc kubenswrapper[4756]: W0930 19:31:33.312947 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd96acc31_a519_46fc_94d8_f19522e77391.slice/crio-491f8988059f7e68820ea749015f1d214f2fc64998eea802c4d2db97a3b30cc6 WatchSource:0}: Error finding container 491f8988059f7e68820ea749015f1d214f2fc64998eea802c4d2db97a3b30cc6: Status 404 returned error can't find the container with id 491f8988059f7e68820ea749015f1d214f2fc64998eea802c4d2db97a3b30cc6 Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.336587 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5786b74e1c05cccdbb4ed46b2fc96112d8b112c92e7e30c08d3f38d63d8a5c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:24Z\\\",\\\"message\\\":\\\"W0930 19:31:14.261606 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 19:31:14.262005 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759260674 cert, and key in /tmp/serving-cert-4044505907/serving-signer.crt, /tmp/serving-cert-4044505907/serving-signer.key\\\\nI0930 19:31:14.657086 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:31:14.660923 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:31:14.661186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:31:14.662354 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4044505907/tls.crt::/tmp/serving-cert-4044505907/tls.key\\\\\\\"\\\\nF0930 19:31:24.928649 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.377172 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.418686 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.490362 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.512184 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.514427 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.535262 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.537870 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.557527 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.596074 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.637568 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.674780 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.719514 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.766098 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.806673 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.836232 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.875450 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.918188 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.955701 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:33 crc kubenswrapper[4756]: I0930 19:31:33.995787 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.035734 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.079002 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.113259 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-8nj5z"] Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.113738 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.116251 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\
\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.126133 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.146691 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.165919 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.186330 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.235052 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.275618 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-host\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.275681 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-serviceca\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.276025 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6nxq\" (UniqueName: \"kubernetes.io/projected/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-kube-api-access-l6nxq\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.284106 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.292802 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc" exitCode=0 Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.293187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc"} Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.295713 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" exitCode=0 Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.295807 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"} Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.295841 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"491f8988059f7e68820ea749015f1d214f2fc64998eea802c4d2db97a3b30cc6"} Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.299554 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5"} Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.316810 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.360321 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c
5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.377902 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6nxq\" (UniqueName: \"kubernetes.io/projected/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-kube-api-access-l6nxq\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " 
pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.378045 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-host\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.378092 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-serviceca\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.380517 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-host\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.382431 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-serviceca\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.397151 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.433606 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6nxq\" (UniqueName: \"kubernetes.io/projected/02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4-kube-api-access-l6nxq\") pod \"node-ca-8nj5z\" (UID: \"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\") " pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.445637 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-8nj5z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.460101 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: W0930 19:31:34.472206 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02bb1d9a_1ad2_406d_afc2_c4cd283fe1f4.slice/crio-879d2329a33d76ca3884bf36910dfa7a8ce211870e02bc64957a78300a4d8702 WatchSource:0}: Error finding container 879d2329a33d76ca3884bf36910dfa7a8ce211870e02bc64957a78300a4d8702: Status 404 returned error can't find the container with id 879d2329a33d76ca3884bf36910dfa7a8ce211870e02bc64957a78300a4d8702 Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.495768 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.554965 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.576483 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.621161 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.655431 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.697087 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.739898 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.775054 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.782756 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.782936 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:31:38.782903108 +0000 UTC m=+28.403836585 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.783053 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.783125 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.783146 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.783183 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783321 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783320 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783340 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783360 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783418 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 
nodeName:}" failed. No retries permitted until 2025-09-30 19:31:38.783389941 +0000 UTC m=+28.404323418 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783446 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783555 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:38.783532795 +0000 UTC m=+28.404466272 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783456 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783598 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783613 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783647 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:38.783640097 +0000 UTC m=+28.404573574 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:34 crc kubenswrapper[4756]: E0930 19:31:34.783886 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:38.783866083 +0000 UTC m=+28.404799720 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.820964 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.858949 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.897060 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.939682 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:34 crc kubenswrapper[4756]: I0930 19:31:34.975294 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.022164 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.057478 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.095680 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cad
d61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.115619 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:35 crc kubenswrapper[4756]: E0930 19:31:35.115804 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.115888 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:35 crc kubenswrapper[4756]: E0930 19:31:35.116208 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.115627 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:35 crc kubenswrapper[4756]: E0930 19:31:35.116607 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.142253 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.174683 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.216610 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.257103 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.303282 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308648 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308716 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" 
event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308735 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308750 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308764 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.308778 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.310910 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879" exitCode=0 Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.311009 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.312618 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8nj5z" event={"ID":"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4","Type":"ContainerStarted","Data":"28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.312687 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8nj5z" event={"ID":"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4","Type":"ContainerStarted","Data":"879d2329a33d76ca3884bf36910dfa7a8ce211870e02bc64957a78300a4d8702"} Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.339451 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.378335 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.416782 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.456652 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.501340 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.538120 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.576421 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.613899 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.661247 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.693540 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.740579 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.765499 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.766377 4756 scope.go:117] "RemoveContainer" containerID="a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786" Sep 30 19:31:35 crc kubenswrapper[4756]: E0930 19:31:35.766607 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.776101 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.826465 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.864199 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.896468 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.936040 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:35 crc kubenswrapper[4756]: I0930 19:31:35.976042 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.019614 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.317058 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3" exitCode=0 Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.317113 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3"} Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.335067 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.347681 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.362078 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.376285 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.390641 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.409100 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.423106 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.448757 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.464541 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.478703 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.490322 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.504719 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.536061 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.575241 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.618124 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.923428 4756 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.925627 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.925671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.925681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.925810 4756 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.934554 4756 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.934827 4756 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.935978 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.936011 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.936021 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.936039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.936054 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:36Z","lastTransitionTime":"2025-09-30T19:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:36 crc kubenswrapper[4756]: E0930 19:31:36.950138 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.954489 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.954560 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.954573 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.954589 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.954599 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:36Z","lastTransitionTime":"2025-09-30T19:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:36 crc kubenswrapper[4756]: E0930 19:31:36.968050 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.972570 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.972607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.972617 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.972635 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.972646 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:36Z","lastTransitionTime":"2025-09-30T19:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:36 crc kubenswrapper[4756]: E0930 19:31:36.987087 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.992438 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.992493 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.992510 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.992536 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:36 crc kubenswrapper[4756]: I0930 19:31:36.992556 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:36Z","lastTransitionTime":"2025-09-30T19:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.010597 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[… image list identical to the first patch attempt above …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.014887 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.014931 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.014955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.014986 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.015002 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.032767 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[… image list identical to the first patch attempt above …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.032947 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.034985 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.035055 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.035066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.035085 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.035463 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.115251 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.115301 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.115337 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.116354 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.116500 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:37 crc kubenswrapper[4756]: E0930 19:31:37.116542 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.138936 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.138968 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.138979 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.138995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.139007 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.241592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.241930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.242047 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.242137 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.242211 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.324313 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da" exitCode=0 Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.324415 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.331611 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.344935 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.344985 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.344999 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.345020 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.345033 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.352939 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.370944 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.385093 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.399296 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.417115 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.431450 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449000 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449521 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449563 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449601 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.449617 4756 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.462632 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.473827 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.489313 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.510009 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.526757 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.540862 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.552494 4756 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.552545 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.552560 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.552582 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.552593 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.565606 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.578327 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:37Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.655683 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.655716 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.655727 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.655744 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.655758 4756 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.758973 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.759042 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.759054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.759075 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.759094 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.861623 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.861679 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.861694 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.861716 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.861732 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.971311 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.971360 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.971370 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.971388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:37 crc kubenswrapper[4756]: I0930 19:31:37.971423 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:37Z","lastTransitionTime":"2025-09-30T19:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.075689 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.075727 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.075737 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.075756 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.075769 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.181077 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.181114 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.181124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.181142 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.181153 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.284554 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.284598 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.284611 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.284630 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.284642 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.338267 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe" exitCode=0 Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.338334 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.355117 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.372164 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.385011 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.386524 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.386584 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.386598 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.386620 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.386635 4756 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.400760 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.411560 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.430612 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.444705 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.466783 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d
92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.479561 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490534 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490570 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490596 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490608 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.490918 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.507847 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.519388 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.532661 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.545880 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.561478 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.593221 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.593272 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.593285 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.593303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.593314 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.696864 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.696916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.696936 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.696959 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.696978 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.799824 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.799885 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.799897 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.799916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.799928 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.828332 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.828487 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.828521 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.828561 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.828584 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828632 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.828593464 +0000 UTC m=+36.449526951 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828715 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828791 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.828767468 +0000 UTC m=+36.449701155 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828804 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828826 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828846 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828906 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.828891752 +0000 UTC m=+36.449825459 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828987 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.828999 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.829012 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.829041 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.829032465 +0000 UTC m=+36.449965952 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.829098 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: E0930 19:31:38.829128 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.829118297 +0000 UTC m=+36.450051784 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.903032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.903077 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.903085 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.903103 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:38 crc kubenswrapper[4756]: I0930 19:31:38.903115 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:38Z","lastTransitionTime":"2025-09-30T19:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.006338 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.006439 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.006460 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.006489 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.006508 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.109857 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.109936 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.109956 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.109982 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.110004 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.115144 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.115199 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.115346 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:39 crc kubenswrapper[4756]: E0930 19:31:39.115506 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:31:39 crc kubenswrapper[4756]: E0930 19:31:39.115686 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:31:39 crc kubenswrapper[4756]: E0930 19:31:39.115951 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.213165 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.213220 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.213239 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.213266 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.213287 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.323700 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.323733 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.323742 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.323758 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.323767 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.347161 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b4cba15-5ae1-4c98-a5e5-060b3af3aec6" containerID="decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8" exitCode=0
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.347224 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerDied","Data":"decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8"}
Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.374383 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" 
feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.394895 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.413142 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.426744 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.426790 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.426802 4756 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.426822 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.426837 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.428562 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.442317 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.460900 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.473736 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.487972 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.499347 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.514218 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e87
7edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.528604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.528642 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.528653 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.528671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.528687 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.532585 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.547241 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.559259 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cad
d61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.576917 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.586843 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.631812 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.631889 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.631902 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.631923 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.631953 4756 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.734348 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.734427 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.734442 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.734462 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.734475 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.837815 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.837863 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.837912 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.837931 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.837943 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.941280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.941345 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.941362 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.941391 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:39 crc kubenswrapper[4756]: I0930 19:31:39.941441 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:39Z","lastTransitionTime":"2025-09-30T19:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.044590 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.045014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.045027 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.045045 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.045059 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.147693 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.147809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.147831 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.147859 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.147879 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.250868 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.250910 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.250921 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.250941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.250953 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.352604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.352646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.352658 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.352678 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.352691 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.354414 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" event={"ID":"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6","Type":"ContainerStarted","Data":"13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.358461 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.358765 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.358815 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.371240 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.382940 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.396685 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.409477 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.424704 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.437982 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.453679 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.455686 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.455730 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc 
kubenswrapper[4756]: I0930 19:31:40.455744 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.455767 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.455780 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.476123 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.480282 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.480365 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.492287 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.521513 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.537335 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.549954 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.558742 4756 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.558769 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.558778 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.558798 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.558810 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.567039 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.583700 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.598085 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.611618 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.625989 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.642858 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.655528 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.661638 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.661701 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.661715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.661736 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.661752 4756 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.671643 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.683943 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.701333 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.721436 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.737761 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.751198 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.764564 4756 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.764610 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.764622 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.764638 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.764649 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.771705 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6
534304d8f8c3e1edc868b8b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.784449 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.801084 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.820152 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.835050 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:40Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.867782 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.867833 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.867843 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.867862 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.867873 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.971641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.971725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.971753 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.971784 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:40 crc kubenswrapper[4756]: I0930 19:31:40.971807 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:40Z","lastTransitionTime":"2025-09-30T19:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.075507 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.075574 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.075600 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.075633 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.075656 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.115068 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.115241 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:41 crc kubenswrapper[4756]: E0930 19:31:41.115423 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.115470 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:41 crc kubenswrapper[4756]: E0930 19:31:41.115630 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:31:41 crc kubenswrapper[4756]: E0930 19:31:41.115768 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.128490 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.156064 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.173938 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.178354 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.178385 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.178420 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.178453 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.178467 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.192707 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.233497 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6
534304d8f8c3e1edc868b8b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.259959 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.279744 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.280576 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.280625 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.280635 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.280653 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.280663 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.295983 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.308469 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.319414 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.334907 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.347896 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.361719 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.361851 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.372559 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.382687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.382722 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.382734 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.382751 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.382790 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.388110 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b338
2af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:41Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.487198 4756 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.487514 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.487631 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.487722 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.487800 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.591598 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.591649 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.591659 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.591679 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.591690 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.693768 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.693813 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.693826 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.693844 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.693856 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.802837 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.802896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.802908 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.802930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.802943 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.905522 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.905564 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.905575 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.905595 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:41 crc kubenswrapper[4756]: I0930 19:31:41.905603 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:41Z","lastTransitionTime":"2025-09-30T19:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.008122 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.008180 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.008219 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.008247 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.008300 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.110812 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.110865 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.110878 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.110896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.110906 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.214850 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.214907 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.214919 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.214941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.214954 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.317731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.317770 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.317780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.317797 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.317808 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.366004 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.421202 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.421272 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.421287 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.421313 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.421330 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.524955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.524998 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.525009 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.525024 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.525037 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.627877 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.627955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.627966 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.627984 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.628000 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.730650 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.730697 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.730707 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.730726 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.730741 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.833300 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.833344 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.833357 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.833374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.833388 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.936451 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.936499 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.936509 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.936524 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:42 crc kubenswrapper[4756]: I0930 19:31:42.936536 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:42Z","lastTransitionTime":"2025-09-30T19:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.039637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.039672 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.039681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.039698 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.039709 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.115681 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.115869 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.116121 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:43 crc kubenswrapper[4756]: E0930 19:31:43.116134 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:31:43 crc kubenswrapper[4756]: E0930 19:31:43.116223 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:31:43 crc kubenswrapper[4756]: E0930 19:31:43.116297 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.142803 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.142855 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.142864 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.142880 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.142892 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.246309 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.246388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.246426 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.246452 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.246467 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.349419 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.349477 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.349489 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.349510 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.349522 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.370972 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/0.log"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.373797 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1" exitCode=1
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.373834 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1"}
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.374620 4756 scope.go:117] "RemoveContainer" containerID="2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1"
Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.389652 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.403501 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.417816 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.429966 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.445486 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.452117 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.452191 4756 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.452202 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.452218 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.452228 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.457607 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 
2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.481052 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.492714 4756 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 
19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.512505 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bca
bb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.522917 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.544946 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd0056
9159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.555290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.555331 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.555341 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.555361 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.555370 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.562642 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.575187 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.589877 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.603343 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:43Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.658588 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.658639 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.658650 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.658666 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.658677 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.762168 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.762228 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.762240 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.762259 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.762274 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.865001 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.865048 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.865060 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.865077 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.865089 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.968008 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.968054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.968064 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.968083 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:43 crc kubenswrapper[4756]: I0930 19:31:43.968096 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:43Z","lastTransitionTime":"2025-09-30T19:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.071433 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.071487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.071499 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.071519 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.071533 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.173976 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.174015 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.174030 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.174047 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.174057 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.276969 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.277012 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.277028 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.277045 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.277058 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379023 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379106 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379131 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379149 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379198 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/1.log" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.379861 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/0.log" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.383342 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921" exitCode=1 Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.383429 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.383487 4756 scope.go:117] "RemoveContainer" containerID="2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.385037 4756 scope.go:117] "RemoveContainer" containerID="721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921" Sep 30 19:31:44 crc kubenswrapper[4756]: E0930 19:31:44.385383 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.400846 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.413592 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.427327 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.441879 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.459260 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.471667 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.481812 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.481851 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.481864 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.481882 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.482179 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.483120 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.500087 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] 
Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.509839 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.521568 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.531475 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.541689 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h"] Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.546708 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.548121 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.549267 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.549356 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.564707 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.575668 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.585455 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.585487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.585499 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.585517 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.585528 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.587098 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.597420 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.606627 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.618197 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.629314 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.641369 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.650304 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.662886 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.672709 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.681466 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688619 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc8dq\" (UniqueName: \"kubernetes.io/projected/7ae264bc-c901-4628-89f4-53e94403dff8-kube-api-access-pc8dq\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688670 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688733 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ae264bc-c901-4628-89f4-53e94403dff8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688806 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688836 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688850 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688871 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688884 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.688925 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.702637 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.718075 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.728058 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.747347 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry 
object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.759795 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.772195 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.785230 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.790084 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc8dq\" (UniqueName: \"kubernetes.io/projected/7ae264bc-c901-4628-89f4-53e94403dff8-kube-api-access-pc8dq\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.790120 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.790162 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ae264bc-c901-4628-89f4-53e94403dff8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.790186 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.790792 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791349 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791412 
4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791430 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791456 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791469 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.791510 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ae264bc-c901-4628-89f4-53e94403dff8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.797019 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ae264bc-c901-4628-89f4-53e94403dff8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.805530 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc8dq\" (UniqueName: \"kubernetes.io/projected/7ae264bc-c901-4628-89f4-53e94403dff8-kube-api-access-pc8dq\") pod \"ovnkube-control-plane-749d76644c-gld2h\" (UID: \"7ae264bc-c901-4628-89f4-53e94403dff8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.865840 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h"
Sep 30 19:31:44 crc kubenswrapper[4756]: W0930 19:31:44.880542 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ae264bc_c901_4628_89f4_53e94403dff8.slice/crio-e642defa6833583693720409926aecb846be656a85f18a9d25392d14e6f5d87f WatchSource:0}: Error finding container e642defa6833583693720409926aecb846be656a85f18a9d25392d14e6f5d87f: Status 404 returned error can't find the container with id e642defa6833583693720409926aecb846be656a85f18a9d25392d14e6f5d87f
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.893709 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.893749 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.893760 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.893780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.893796 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.996889 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.996937 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.996949 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.996970 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:44 crc kubenswrapper[4756]: I0930 19:31:44.996983 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:44Z","lastTransitionTime":"2025-09-30T19:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.099687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.099734 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.099746 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.099763 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.099777 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.115247 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.115351 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:45 crc kubenswrapper[4756]: E0930 19:31:45.115444 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.115494 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:45 crc kubenswrapper[4756]: E0930 19:31:45.115613 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:31:45 crc kubenswrapper[4756]: E0930 19:31:45.115821 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.202050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.202076 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.202083 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.202098 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.202108 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.305451 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.305496 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.305508 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.305528 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.305543 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.392740 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/1.log"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.397830 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" event={"ID":"7ae264bc-c901-4628-89f4-53e94403dff8","Type":"ContainerStarted","Data":"764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.397874 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" event={"ID":"7ae264bc-c901-4628-89f4-53e94403dff8","Type":"ContainerStarted","Data":"d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.397885 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" event={"ID":"7ae264bc-c901-4628-89f4-53e94403dff8","Type":"ContainerStarted","Data":"e642defa6833583693720409926aecb846be656a85f18a9d25392d14e6f5d87f"}
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.408629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.408679 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.408690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.408710 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.408725 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.415933 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.428702 4756 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.441127 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.455378 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.464637 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.474389 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.492241 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry 
object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.502366 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.511161 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.511236 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.511252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.511278 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.511293 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.523882 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.535561 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.550566 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.562015 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.573584 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.585273 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.595566 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.611284 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:45Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.613946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.614003 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.614013 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.614028 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.614039 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.717052 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.717104 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.717124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.717149 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.717167 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.820242 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.820325 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.820341 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.820370 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.820388 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.924171 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.924252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.924290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.924323 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:45 crc kubenswrapper[4756]: I0930 19:31:45.924347 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:45Z","lastTransitionTime":"2025-09-30T19:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.025388 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-b4n45"] Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.026860 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.027006 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.027218 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.027270 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.027288 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.027315 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.027335 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.043286 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 
19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.058068 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.071727 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.084131 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.095514 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.111617 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.125474 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.129580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.129628 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.129641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.129660 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.129675 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.137313 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.149822 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.162176 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.170994 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.180675 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.198287 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry 
object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.202968 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.203041 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsl2z\" (UniqueName: \"kubernetes.io/projected/6c151bbd-9757-44ba-aa11-ff679e841fdc-kube-api-access-fsl2z\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " 
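
Every status patch in this stretch of the log is rejected for the same reason: the network-node-identity webhook's serving certificate expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-09-30, so TLS verification fails before the patch ever reaches the API server. The validity-window comparison that produces this error is standard crypto/x509 behavior; below is a minimal sketch of the same check (the PEM path is hypothetical, and this is not the webhook's own code):

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path; the real certificate lives in the webhook's
        // serving-cert secret.
        data, err := os.ReadFile("webhook-serving-cert.pem")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        if now := time.Now(); now.After(cert.NotAfter) {
            // Same shape as the kubelet error above:
            // "current time <now> is after <NotAfter>".
            fmt.Printf("x509: certificate has expired: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339),
                cert.NotAfter.UTC().Format(time.RFC3339))
        }
    }

Until the certificate is rotated, every POST to https://127.0.0.1:9743 will fail the same way, which is why none of the pod statuses in this section can be persisted.
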
pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.207816 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.217512 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.231957 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.232015 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.232027 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.232046 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.232059 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.244564 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.258181 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.303797 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.303844 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsl2z\" (UniqueName: \"kubernetes.io/projected/6c151bbd-9757-44ba-aa11-ff679e841fdc-kube-api-access-fsl2z\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.303994 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.304074 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:31:46.804053988 +0000 UTC m=+36.424987465 (durationBeforeRetry 500ms). 
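
Note the retry bookkeeping in the mount failure just above: the first metrics-certs failure is retried after 500ms, and later occurrences in this log back off to 1s and then 16s. A generic doubling-backoff sketch of that pattern follows (illustrative only; the kubelet's nestedpendingoperations keeps its own cap and per-volume bookkeeping):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond // first durationBeforeRetry seen in the log
        const maxDelay = 2 * time.Minute // assumed cap for this sketch
        for attempt := 1; attempt <= 7; attempt++ {
            fmt.Printf("attempt %d: no retries permitted for %v\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

Under plain doubling, 500ms reaches the 16s delay seen below after six consecutive failures, which matches the progression of this entry's retries.
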
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.320784 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsl2z\" (UniqueName: \"kubernetes.io/projected/6c151bbd-9757-44ba-aa11-ff679e841fdc-kube-api-access-fsl2z\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.335781 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.335853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.335871 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.335900 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.335920 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.439039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.439139 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.439152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.439170 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.439183 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.542379 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.542444 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.542454 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.542472 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.542483 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.644381 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.644444 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.644455 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.644475 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.644487 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.748373 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.748715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.748725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.748742 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.748753 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.811142 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.811388 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.811536 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:31:47.811508334 +0000 UTC m=+37.432442011 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.852145 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.852187 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.852196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.852212 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.852223 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.912269 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.912461 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912517 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:32:02.91247399 +0000 UTC m=+52.533407467 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.912627 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912649 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912691 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912711 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912756 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912757 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912788 4756 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:02.912760397 +0000 UTC m=+52.533694094 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912822 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:02.912806849 +0000 UTC m=+52.533740576 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.912656 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.912845 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:02.912833179 +0000 UTC m=+52.533766896 (durationBeforeRetry 16s). 
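
The kube-api-access-cqllr failure above shows what a projected service-account volume depends on: besides the token itself, it bundles the kube-root-ca.crt and (on OpenShift) openshift-service-ca.crt ConfigMaps, so SetUp fails while either object is still unregistered with the kubelet. A hypothetical helper expressing that dependency (not kubelet code):

    package main

    import "fmt"

    // canProjectKubeAPIAccess mirrors the dependency visible in the log:
    // projecting a kube-api-access-* volume requires both CA ConfigMaps to
    // be registered alongside the service-account token.
    func canProjectKubeAPIAccess(registered map[string]bool) error {
        for _, cm := range []string{"kube-root-ca.crt", "openshift-service-ca.crt"} {
            if !registered[cm] {
                return fmt.Errorf("object %q not registered", cm)
            }
        }
        return nil
    }

    func main() {
        // Both ConfigMaps missing, as in the entries above.
        fmt.Println(canProjectKubeAPIAccess(map[string]bool{}))
    }
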
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.913020 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.913172 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.913201 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.913215 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:46 crc kubenswrapper[4756]: E0930 19:31:46.913263 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:02.91325036 +0000 UTC m=+52.534184037 (durationBeforeRetry 16s). 
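
Earlier in this burst, UnmountVolume.TearDown for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 failed with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers": after a kubelet restart, CSI plugins must re-register over the plugin socket before any of their volumes can be set up or torn down. A sketch of that registry lookup, using hypothetical types rather than the kubelet's real plugin registry:

    package main

    import "fmt"

    // csiDrivers stands in for the kubelet's set of registered CSI plugins.
    type csiDrivers map[string]struct{}

    func (d csiDrivers) client(name string) error {
        if _, ok := d[name]; !ok {
            return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
        }
        return nil
    }

    func main() {
        reg := csiDrivers{} // plugin has not re-registered since the restart
        fmt.Println(reg.client("kubevirt.io.hostpath-provisioner"))
    }

Hence the 16s requeue: the operation is parked until the driver shows up in the registry.
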
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.955098 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.955149 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.955164 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.955188 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:46 crc kubenswrapper[4756]: I0930 19:31:46.955205 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:46Z","lastTransitionTime":"2025-09-30T19:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.058010 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.058059 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.058072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.058120 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.058132 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.115531 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.115770 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.115859 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.116012 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.116126 4756 scope.go:117] "RemoveContainer" containerID="a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786" Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.116493 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.116591 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.160856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.160897 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.160906 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.160923 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.160933 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.263293 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.263341 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.263357 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.263379 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.263406 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.272960 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.272993 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.273004 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.273028 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.273037 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
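The nestedpendingoperations entry above shows how the kubelet spaces out failed mounts: the kube-api-access-s2dwl volume may not be retried for another 16s (durationBeforeRetry), and that delay grows with each consecutive failure. A minimal Go sketch of such a doubling backoff, assuming an initial delay of 500ms and a cap of 2m2s (both values are assumptions for illustration, not read from this log):

package main

import (
	"fmt"
	"time"
)

// Assumed tuning values for illustration only.
const (
	initialBackoff = 500 * time.Millisecond
	maxBackoff     = 2*time.Minute + 2*time.Second
)

// nextBackoff doubles the previous delay, starting at initialBackoff
// and never exceeding maxBackoff.
func nextBackoff(current time.Duration) time.Duration {
	if current == 0 {
		return initialBackoff
	}
	next := 2 * current
	if next > maxBackoff {
		return maxBackoff
	}
	return next
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 7; attempt++ {
		d = nextBackoff(d)
		fmt.Printf("failure %d: durationBeforeRetry %v\n", attempt, d)
	}
}

With those assumed values, the sixth consecutive failure yields exactly the 16s wait recorded above.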
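Every NodeNotReady condition in this stretch carries the same root cause string: no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that readiness probe, checking the directory named in the log for any CNI config file (the set of glob patterns is an assumption, not taken from this log):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the NetworkReady=false message above.
	dir := "/etc/kubernetes/cni/net.d"
	// Assumed set of CNI config extensions; adjust for your runtime.
	var configs []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err == nil {
			configs = append(configs, matches...)
		}
	}
	if len(configs) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file found in", dir)
		os.Exit(1)
	}
	fmt.Println("CNI configuration files:", configs)
}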
Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.286082 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.291534 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.291574 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.291583 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.291599 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.291619 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.305505 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.311206 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.311261 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.311278 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.311308 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.311323 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.327424 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.332007 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.332065 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.332074 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.332094 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.332106 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.349980 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.354613 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.354652 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.354666 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.354687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.354700 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.373981 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.374098 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.376253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.376280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.376290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.376306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.376317 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.410639 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.412358 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.413571 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.430680 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.445129 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.459345 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.473319 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.478500 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.478674 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.478748 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.478813 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.478870 4756 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.487963 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.498749 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.512787 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.526819 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 
19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.538806 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.558349 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.572053 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.581754 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.581786 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.581794 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.581809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.581820 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.582647 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.601459 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] 
Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.611212 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.625345 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.641247 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.657610 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:47Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.685098 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.685170 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.685189 4756 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.685215 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.685232 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.788685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.789154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.789255 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.789348 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.789489 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.822603 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.822855 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:47 crc kubenswrapper[4756]: E0930 19:31:47.822934 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:31:49.822909325 +0000 UTC m=+39.443842822 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.893303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.893641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.893706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.893844 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.893919 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.997664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.997709 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.997723 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.997742 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:47 crc kubenswrapper[4756]: I0930 19:31:47.997755 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:47Z","lastTransitionTime":"2025-09-30T19:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.100172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.100774 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.100853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.100929 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.100989 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.115587 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:48 crc kubenswrapper[4756]: E0930 19:31:48.115797 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.203312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.203435 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.203460 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.203558 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.203599 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.306493 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.306544 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.306557 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.306582 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.306597 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.409714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.410181 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.410269 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.410368 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.410492 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.513133 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.513180 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.513192 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.513208 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.513221 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.616317 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.616364 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.616377 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.616408 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.616421 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.719093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.719131 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.719141 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.719160 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.719171 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.823275 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.823573 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.823690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.823767 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.823825 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.926767 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.927087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.927150 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.927316 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:48 crc kubenswrapper[4756]: I0930 19:31:48.927378 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:48Z","lastTransitionTime":"2025-09-30T19:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.030859 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.030926 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.030941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.030962 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.030976 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.116047 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.116128 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:49 crc kubenswrapper[4756]: E0930 19:31:49.116228 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.116345 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:49 crc kubenswrapper[4756]: E0930 19:31:49.116442 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:49 crc kubenswrapper[4756]: E0930 19:31:49.116531 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.133955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.134006 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.134019 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.134040 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.134054 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.237829 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.237889 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.237907 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.237933 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.237953 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.340615 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.341106 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.341261 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.341487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.341640 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.445579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.445621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.445633 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.445651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.445664 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.548990 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.549039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.549051 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.549070 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.549087 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.651435 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.651503 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.651520 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.651544 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.651560 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.753872 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.753934 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.753950 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.753973 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.753991 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.842913 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:49 crc kubenswrapper[4756]: E0930 19:31:49.843172 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:49 crc kubenswrapper[4756]: E0930 19:31:49.843287 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:31:53.843261923 +0000 UTC m=+43.464195400 (durationBeforeRetry 4s). 
Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.856688 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.856749 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.856762 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.856785 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.856803 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.960411 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.960466 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.960481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.960501 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:49 crc kubenswrapper[4756]: I0930 19:31:49.960513 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:49Z","lastTransitionTime":"2025-09-30T19:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
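Each setters.go:603 entry above corresponds to kubelet rewriting the node's Ready condition; while this loop runs, the same condition is visible from the API side, for example:

    kubectl get node crc -o jsonpath='{.status.conditions[?(@.type=="Ready")].message}'

which would return the same "container runtime network not ready" message until a CNI config shows up.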
Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.063626 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.063693 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.063706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.063730 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.063742 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.115973 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:50 crc kubenswrapper[4756]: E0930 19:31:50.116173 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.168644 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.168692 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.168704 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.168722 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.168733 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.272152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.272217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.272234 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.272264 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.272283 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.375527 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.375593 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.375607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.375628 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.375644 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.479116 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.479210 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.479236 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.479267 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.479285 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.581927 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.582003 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.582023 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.582053 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.582073 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.685492 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.685532 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.685542 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.685560 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.685572 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.790856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.790929 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.790953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.790981 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.791009 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.894608 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.894666 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.894677 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.894698 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:50 crc kubenswrapper[4756]: I0930 19:31:50.894716 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:50Z","lastTransitionTime":"2025-09-30T19:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.001969 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.002044 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.002062 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.002092 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.002112 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.105920 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.105997 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.106014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.106043 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.106062 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.115275 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.115352 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:51 crc kubenswrapper[4756]: E0930 19:31:51.115493 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:51 crc kubenswrapper[4756]: E0930 19:31:51.115682 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.115517 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:51 crc kubenswrapper[4756]: E0930 19:31:51.115801 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.138237 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.154243 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.186254 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a2a998d0733193f87d6782c9553ac48179934e6534304d8f8c3e1edc868b8b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:42Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479361 6071 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479312 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:31:42.479421 6071 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479450 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 19:31:42.479463 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 19:31:42.479489 6071 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.479493 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 19:31:42.479507 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 19:31:42.479503 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:31:42.479331 6071 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:31:42.479748 6071 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:31:42.480441 6071 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry 
object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.201960 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.209954 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.210028 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.210048 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.210079 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.210099 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.229084 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.262496 4756 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cr
i-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.283434 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.311603 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.312843 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.312874 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.312884 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.312901 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.312914 4756 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.330508 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.347749 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.360038 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.373089 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.386588 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.400245 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.413647 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 
19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.416439 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.416481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.416515 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.416540 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.416556 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.430958 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.448902 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:51Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.520260 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.520321 4756 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.520363 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.520384 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.520421 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.623256 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.624163 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.624246 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.624425 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.624509 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.727810 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.727851 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.727860 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.727877 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.727887 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.830930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.830985 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.830996 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.831018 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.831031 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.933349 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.933410 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.933423 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.933439 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:51 crc kubenswrapper[4756]: I0930 19:31:51.933451 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:51Z","lastTransitionTime":"2025-09-30T19:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.036060 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.036132 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.036142 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.036156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.036167 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.115342 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:52 crc kubenswrapper[4756]: E0930 19:31:52.115566 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.115814 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.117325 4756 scope.go:117] "RemoveContainer" containerID="721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921" Sep 30 19:31:52 crc kubenswrapper[4756]: E0930 19:31:52.117647 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.139899 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.140279 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.140530 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.140084 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.140641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.140804 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.154751 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.168742 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.193162 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.207945 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.220222 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.233558 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.243963 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.244346 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.244450 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.244540 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.244608 4756 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.248857 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.261479 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.274964 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.287862 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.301415 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.320284 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.335162 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347656 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347693 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347708 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.347895 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.367867 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.381245 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:52Z is after 2025-08-24T17:21:41Z" Sep 30 
19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.450362 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.450436 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.450448 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.450495 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.450507 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.552767 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.552804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.552814 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.552832 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.552846 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.655914 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.655965 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.655977 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.655997 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.656011 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.759113 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.759188 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.759198 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.759218 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.759239 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.862359 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.862438 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.862450 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.862468 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.862479 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.965961 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.966020 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.966033 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.966054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:52 crc kubenswrapper[4756]: I0930 19:31:52.966068 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:52Z","lastTransitionTime":"2025-09-30T19:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.069755 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.069821 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.069839 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.069867 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.069887 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.115775 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.115866 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:53 crc kubenswrapper[4756]: E0930 19:31:53.115989 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:53 crc kubenswrapper[4756]: E0930 19:31:53.116136 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.116326 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:53 crc kubenswrapper[4756]: E0930 19:31:53.116427 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.174172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.174222 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.174232 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.174253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.174266 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.278022 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.278096 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.278123 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.278156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.278179 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.380763 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.380808 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.380817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.380834 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.380846 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.483095 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.483139 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.483189 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.483212 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.483225 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.586220 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.586280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.586297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.586324 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.586342 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.689150 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.689210 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.689226 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.689247 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.689269 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.792685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.792727 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.792737 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.792755 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.792767 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.888372 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:53 crc kubenswrapper[4756]: E0930 19:31:53.888710 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:53 crc kubenswrapper[4756]: E0930 19:31:53.888825 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:32:01.888790074 +0000 UTC m=+51.509723591 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.895863 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.895929 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.895946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.895974 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:53 crc kubenswrapper[4756]: I0930 19:31:53.895996 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:53Z","lastTransitionTime":"2025-09-30T19:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.001498 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.001555 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.001564 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.001580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.001591 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:54Z","lastTransitionTime":"2025-09-30T19:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
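The nestedpendingoperations entry above shows the volume manager backing off exponentially: it refuses retries for 8s (durationBeforeRetry) and schedules the next attempt at m=+51.5. A small sketch of that schedule; the constants are an assumption here (upstream kubelet uses roughly a 0.5s initial delay, doubling per failure, capped at 2m2s), not something this log states:

# backoff_schedule.py - sketch of a kubelet-style exponential backoff
# that would produce the "durationBeforeRetry 8s" seen above.
# Constants are assumed, not taken from this log.
INITIAL_S = 0.5   # assumed initial delay
FACTOR = 2.0      # assumed doubling factor
CAP_S = 122.0     # assumed cap (2m2s)

def schedule(failures):
    delay = INITIAL_S
    for attempt in range(1, failures + 1):
        yield attempt, delay
        delay = min(delay * FACTOR, CAP_S)

for attempt, delay in schedule(9):
    print(f"failure {attempt}: wait {delay:g}s before retrying")
# Under these assumptions the 8s delay would correspond to the 5th
# consecutive failure for this volume.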
[... same five-entry status cycle repeated at 19:31:54.104 ...]
Sep 30 19:31:54 crc kubenswrapper[4756]: I0930 19:31:54.114907 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:31:54 crc kubenswrapper[4756]: E0930 19:31:54.115058 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
[... same five-entry status cycle repeated at 19:31:54.207, 19:31:54.310, 19:31:54.413, 19:31:54.516, 19:31:54.620, 19:31:54.723, 19:31:54.826, 19:31:54.929 and 19:31:55.032 ...]
Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.115509 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.115566 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.115541 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:31:55 crc kubenswrapper[4756]: E0930 19:31:55.115764 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
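The same three pods fail their sync on every ~2s resync while the network stays down. A quick way to see which pods are stuck, and how often, is to tally these entries; a sketch in Python (the input path "kubelet.log" is illustrative):

# count_sync_errors.py - tally "Error syncing pod, skipping" entries
# per pod in a saved kubelet log.
import re
from collections import Counter

PAT = re.compile(r'"Error syncing pod, skipping".*?pod="([^"]+)"')

counts = Counter()
with open("kubelet.log", encoding="utf-8", errors="replace") as f:
    for line in f:
        m = PAT.search(line)
        if m:
            counts[m.group(1)] += 1

for pod, n in counts.most_common():
    print(f"{n:6d}  {pod}")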
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:55 crc kubenswrapper[4756]: E0930 19:31:55.116542 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:55 crc kubenswrapper[4756]: E0930 19:31:55.116464 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.135186 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.135223 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.135232 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.135247 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.135261 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.243020 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.243094 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.243113 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.243145 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.243164 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.346988 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.347070 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.347085 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.347104 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.347119 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.449111 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.449155 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.449164 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.449182 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.449195 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.552274 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.552318 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.552327 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.552345 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.552356 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.655058 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.655102 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.655113 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.655130 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.655141 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.758553 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.758604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.758616 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.758635 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.758649 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.862513 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.862573 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.862584 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.862605 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.862620 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.966045 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.966126 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.966146 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.966176 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:55 crc kubenswrapper[4756]: I0930 19:31:55.966202 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:55Z","lastTransitionTime":"2025-09-30T19:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.070066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.070152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.070199 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.070227 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.070241 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.115816 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:56 crc kubenswrapper[4756]: E0930 19:31:56.115998 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
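Every failure in this stream traces back to the same root cause: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/. On this cluster that file is normally written by the network operator (multus / ovn-kubernetes) once it comes up, so its absence just means the network stack has not started yet. Purely to illustrate the file format the kubelet is polling for, a minimal CNI conflist sketch follows; the network name, plugin choice and subnet are hypothetical, and this is not what the operator actually writes:

# write_cni_conflist.py - illustrative only: emit a minimal CNI network
# config list of the kind the kubelet expects to find under
# /etc/kubernetes/cni/net.d/. On OpenShift this file is owned by the
# network operator; do not hand-write it on a real node.
import json

conflist = {
    "cniVersion": "0.4.0",
    "name": "example-net",      # hypothetical network name
    "plugins": [
        {
            "type": "bridge",   # standard CNI bridge plugin
            "bridge": "cni0",
            "isGateway": True,
            "ipMasq": True,
            "ipam": {
                "type": "host-local",
                "subnet": "10.88.0.0/16",  # hypothetical pod subnet
            },
        }
    ],
}

print(json.dumps(conflist, indent=2))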
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.173554 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.173637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.173662 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.173695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.173720 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.276256 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.276316 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.276330 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.276350 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.276365 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.379000 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.379066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.379085 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.379110 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.379128 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.482752 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.482806 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.482820 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.482843 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.482859 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.586037 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.586079 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.586087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.586121 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.586131 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.689654 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.689728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.689747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.689775 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.689792 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.792805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.792869 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.792888 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.792914 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.792931 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.895550 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.895592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.895604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.895623 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.895635 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.998562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.998601 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.998609 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.998623 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:56 crc kubenswrapper[4756]: I0930 19:31:56.998633 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:56Z","lastTransitionTime":"2025-09-30T19:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.101747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.101983 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.102002 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.102019 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.102033 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.115300 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.115365 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.115451 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.115599 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.115694 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.115787 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.205264 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.205306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.205317 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.205342 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.205356 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.308641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.308702 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.308717 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.308737 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.308751 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.411681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.411733 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.411745 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.411762 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.411774 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.520621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.520675 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.520689 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.520715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.520729 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.586198 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.586276 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.586288 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.586310 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.586323 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.604825 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:57Z is after 2025-08-24T17:21:41Z"
Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.609113 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
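The patch failure above is a TLS problem, not an API-server one: the kubelet reached the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/node, but the handshake was rejected because the serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the node's current time of 2025-09-30T19:31:57Z. A small Python sketch that reproduces the same verification failure from the node (host and port are taken from the error; it assumes the signing CA is in the local trust store, which the kubelet evidently had, since verification got as far as the validity window; otherwise the error reported is an unknown issuer instead):

import socket, ssl

HOST, PORT = "127.0.0.1", 9743  # webhook endpoint named in the error above

ctx = ssl.create_default_context()
ctx.check_hostname = False  # only the validity window matters here

try:
    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock) as tls:
            # Reaching this line means the certificate has been rotated.
            print("handshake OK; notAfter:", tls.getpeercert()["notAfter"])
except ssl.SSLCertVerificationError as err:
    # Against the state captured above this prints:
    #   verify failed: certificate has expired
    print("verify failed:", err.verify_message)

For a CRC cluster that has sat unused past its certificate lifetime, the usual remedy is to restart the instance and let the cluster's certificate rotation complete; until then every retry below fails with the same x509 error.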
event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.609330 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.609434 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.609549 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.629905 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:57Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.634893 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.635006 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.635194 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.635290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.635357 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.650356 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:57Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.656092 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.656313 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.656596 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.656713 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.656838 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.673218 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:57Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.678474 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.678535 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.678552 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.678577 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.678592 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.694075 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:57Z is after 2025-08-24T17:21:41Z" Sep 30 19:31:57 crc kubenswrapper[4756]: E0930 19:31:57.694257 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.696413 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.696452 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.696469 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.696490 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.696502 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.799996 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.800050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.800063 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.800086 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.800103 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.903002 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.903070 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.903088 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.903117 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:57 crc kubenswrapper[4756]: I0930 19:31:57.903134 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:57Z","lastTransitionTime":"2025-09-30T19:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.005706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.005762 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.005774 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.005794 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.005807 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.108561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.108636 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.108654 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.108681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.108699 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.115872 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:31:58 crc kubenswrapper[4756]: E0930 19:31:58.116045 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.212123 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.212180 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.212191 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.212213 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.212225 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.315511 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.315576 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.315584 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.315603 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.315617 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.418916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.418977 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.418988 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.419018 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.419030 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.521634 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.522121 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.522266 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.522380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.522506 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.625682 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.625723 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.625732 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.625770 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.625781 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.728426 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.728489 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.728500 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.728518 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.728530 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.832046 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.832102 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.832120 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.832147 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.832164 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.935239 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.935306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.935325 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.935357 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:58 crc kubenswrapper[4756]: I0930 19:31:58.935377 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:58Z","lastTransitionTime":"2025-09-30T19:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.038377 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.038505 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.038533 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.038575 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.038601 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.115449 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.115473 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:31:59 crc kubenswrapper[4756]: E0930 19:31:59.115667 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.115474 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:31:59 crc kubenswrapper[4756]: E0930 19:31:59.115821 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:31:59 crc kubenswrapper[4756]: E0930 19:31:59.115898 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.141686 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.141763 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.141777 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.141805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.141822 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.245366 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.245425 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.245437 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.245457 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.245470 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.349529 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.349591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.349616 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.349642 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.349660 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.452174 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.452217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.452227 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.452246 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.452256 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.555034 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.555094 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.555109 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.555132 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.555148 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.657832 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.657870 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.657881 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.657900 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.657913 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.760485 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.760561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.760586 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.760621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.760652 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.863759 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.863817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.863828 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.863846 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.863855 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.967565 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.967642 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.967667 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.967699 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:31:59 crc kubenswrapper[4756]: I0930 19:31:59.967726 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:31:59Z","lastTransitionTime":"2025-09-30T19:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.071253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.071312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.071323 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.071342 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.071356 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.115752 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:00 crc kubenswrapper[4756]: E0930 19:32:00.116005 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.174359 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.174457 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.174478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.174502 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.174518 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.278129 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.278202 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.278217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.278237 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.278249 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.381523 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.381624 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.381651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.381685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.381712 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.484660 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.484743 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.484764 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.484803 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.484823 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.588747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.588799 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.588811 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.588832 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.588847 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.691472 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.691540 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.691549 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.691565 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.691574 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.795303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.795369 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.795387 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.795438 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.795469 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.898640 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.898699 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.898710 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.898731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:00 crc kubenswrapper[4756]: I0930 19:32:00.898744 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:00Z","lastTransitionTime":"2025-09-30T19:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.001108 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.001154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.001163 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.001183 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.001193 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.104518 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.104575 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.104590 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.104615 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.104690 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.115026 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.115136 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:01 crc kubenswrapper[4756]: E0930 19:32:01.115201 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.115301 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:01 crc kubenswrapper[4756]: E0930 19:32:01.115437 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:01 crc kubenswrapper[4756]: E0930 19:32:01.115692 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.140914 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"rest
artCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\"
:{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.154137 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.166848 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cad
d61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.201290 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.207328 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.207371 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.207382 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.207412 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.207423 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.216067 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.232210 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.248540 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.263691 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.282949 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.298189 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.311199 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.311272 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.311298 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.311335 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.311361 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.319785 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.334154 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.353064 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.366439 4756 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.386658 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.399124 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.413756 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.414726 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.414773 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.414788 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.414814 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.414828 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
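
Every "Failed to update status for pod" entry in the run above fails identically: the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, so the kubelet's x509 verification rejects every status patch. A minimal Go sketch of the same check, assuming only the endpoint shown in the log; InsecureSkipVerify is used so the chain can be fetched and inspected rather than trusted:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the log entries above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the cert; do not trust it
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%v notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

If notAfter is in the past, as the log's "current time ... is after ..." message indicates here, every one of the patch failures above shares that single root cause.
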
Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.518156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.518714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.518878 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.519026 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.519328 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.622919 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.622985 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.623004 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.623035 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.623055 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.727286 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.727369 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.727435 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.727476 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.727537 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.831561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.832002 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.832152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.832298 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.832482 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.936235 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.936308 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.936327 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.936354 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.936375 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:01Z","lastTransitionTime":"2025-09-30T19:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:01 crc kubenswrapper[4756]: I0930 19:32:01.980120 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:01 crc kubenswrapper[4756]: E0930 19:32:01.980373 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:32:01 crc kubenswrapper[4756]: E0930 19:32:01.980559 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:32:17.980525645 +0000 UTC m=+67.601459152 (durationBeforeRetry 16s). 
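
The nestedpendingoperations entry above schedules the failed metrics-certs mount for retry 16s out (m=+67.6); the same kinds of operations later in this log are scheduled 32s out (m=+84.6), so the per-volume delay doubles on each failure. A minimal sketch of that doubling; the initial delay and cap are assumed values, since the log only shows the 16s and 32s steps:

package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond          // assumed starting delay
	const maxDelay = 2*time.Minute + 2*time.Second // assumed cap
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: no retries permitted for %s\n", attempt, delay)
		delay *= 2 // the 16s -> 32s progression visible in these entries
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
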
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.039619 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.039665 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.039675 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.039691 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.039702 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.115696 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.115856 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.142765 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.142809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.142821 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.142840 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.142855 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
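
The util.go and pod_workers entries above show the other half of the NotReady loop: network-metrics-daemon-b4n45 needs a fresh sandbox, but the sync is skipped outright while the runtime reports NetworkReady=false. A rough sketch of that gate, under the assumption that only host-network pods bypass it:

package main

import (
	"errors"
	"fmt"
)

type pod struct {
	name        string
	hostNetwork bool
}

var errNetworkNotReady = errors.New(
	"network is not ready: container runtime network not ready: NetworkReady=false")

// canCreateSandbox sketches the gating seen above: pods that need the
// cluster network are skipped until the CNI plugin reports ready.
func canCreateSandbox(p pod, networkReady bool) error {
	if !networkReady && !p.hostNetwork {
		return fmt.Errorf("error syncing pod %q, skipping: %w", p.name, errNetworkNotReady)
	}
	return nil
}

func main() {
	p := pod{name: "openshift-multus/network-metrics-daemon-b4n45"}
	if err := canCreateSandbox(p, false); err != nil {
		fmt.Println(err)
	}
}
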
Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.245880 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.245930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.245946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.245971 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.245990 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.350242 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.350323 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.350352 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.350385 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.350449 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.454204 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.454286 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.454310 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.454342 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.454365 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.558961 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.559072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.559154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.559194 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.559220 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.663239 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.663328 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.663364 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.663447 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.663478 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.766116 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.766159 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.766173 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.766194 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.766207 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
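
The repeating NodeNotReady condition reduces to one predicate: no file with a recognized extension exists yet in /etc/kubernetes/cni/net.d/, because the network pods that write it are still coming up. A minimal standalone version of that check, assuming the standard libcni extensions (.conf, .conflist, .json):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path from the log message above
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("read dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config present:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file found; network plugin not ready")
	}
}
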
Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.869874 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.869928 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.869941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.869960 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.869973 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.972858 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.972919 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.972933 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.972953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.972966 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:02Z","lastTransitionTime":"2025-09-30T19:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.990246 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.990506 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990531 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
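
The UnmountVolume operation above fails differently from the configmap and secret mounts: as the TearDown detail just below shows, the kubevirt.io.hostpath-provisioner CSI driver has not re-registered with the kubelet after the restart, so the driver-name lookup comes up empty. A toy sketch of that lookup; the registry shape is assumed, since the real one is populated over the kubelet plugin-registration socket:

package main

import "fmt"

// csiRegistry stands in for the kubelet's list of registered CSI drivers
// (driver name -> endpoint). Hypothetical shape for illustration.
type csiRegistry map[string]string

func (r csiRegistry) client(driver string) (string, error) {
	ep, ok := r[driver]
	if !ok {
		return "", fmt.Errorf(
			"driver name %s not found in the list of registered CSI drivers", driver)
	}
	return ep, nil
}

func main() {
	reg := csiRegistry{} // the hostpath provisioner has not registered yet
	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println(err)
	}
}
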
No retries permitted until 2025-09-30 19:32:34.990489078 +0000 UTC m=+84.611422595 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.990621 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.990676 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:02 crc kubenswrapper[4756]: I0930 19:32:02.990764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990805 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990810 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990857 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990880 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:34.990857147 +0000 UTC m=+84.611790664 (durationBeforeRetry 32s). 
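
Note the fan-out in the projected.go entries above: a single kube-api-access-* volume yields one error per missing source, because it aggregates the service-account token with the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps, and every source must resolve before the volume can mount. A small sketch of that aggregation, with the names taken from the log and the kubelet's object cache represented as a plain map:

package main

import "fmt"

type source struct{ kind, name string }

// prepareProjected collects one error per unresolved source, mirroring
// the paired "not registered" messages seen above for a single volume.
func prepareProjected(volume string, sources []source, registered map[string]bool) error {
	var errs []error
	for _, s := range sources {
		if !registered[s.name] {
			errs = append(errs, fmt.Errorf("object %q not registered", s.name))
		}
	}
	if len(errs) > 0 {
		return fmt.Errorf("error preparing data for projected volume %s: %v", volume, errs)
	}
	return nil
}

func main() {
	sources := []source{
		{"configmap", "openshift-network-diagnostics/kube-root-ca.crt"},
		{"configmap", "openshift-network-diagnostics/openshift-service-ca.crt"},
	}
	// Neither ConfigMap has been synced into the kubelet's cache yet.
	fmt.Println(prepareProjected("kube-api-access-s2dwl", sources, map[string]bool{}))
}
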
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.990882 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991008 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:34.990976761 +0000 UTC m=+84.611910278 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991043 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991091 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991122 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991040 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991221 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:34.991185076 +0000 UTC m=+84.612118603 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:02 crc kubenswrapper[4756]: E0930 19:32:02.991280 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:32:34.991244958 +0000 UTC m=+84.612178475 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.075588 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.076016 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.076031 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.076053 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.076067 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.101065 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.115155 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.115192 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.115258 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:03 crc kubenswrapper[4756]: E0930 19:32:03.115340 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:03 crc kubenswrapper[4756]: E0930 19:32:03.115582 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:03 crc kubenswrapper[4756]: E0930 19:32:03.115822 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.121104 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e9116
99a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.122283 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.150499 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.165207 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.180388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.180477 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.180495 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.180519 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.180533 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.181612 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.204865 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.223281 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.283500 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.283551 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.283562 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.283579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.283591 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.291322 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\"
:{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" 
feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.306679 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.319669 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.332590 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.343832 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.357225 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.370876 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.382365 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.386114 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.386145 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.386175 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.386196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.386209 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.398735 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.412762 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.429250 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.488888 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.488967 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc 
kubenswrapper[4756]: I0930 19:32:03.488992 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.489025 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.489052 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.593728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.593790 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.593803 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.593824 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.593838 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.697952 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.698012 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.698031 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.698058 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.698077 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.808124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.808182 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.808193 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.808213 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.808224 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.840100 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.856554 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.873107 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.886758 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912079 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912157 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912182 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912213 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912237 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:03Z","lastTransitionTime":"2025-09-30T19:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.912166 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b338
2af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.928908 4756 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.945366 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:03 crc kubenswrapper[4756]: I0930 19:32:03.977664 4756 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c91
4dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.000976 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.016840 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.016912 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.016932 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.016960 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.016981 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.024578 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.038312 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.071541 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982
c008c5b00f72787911d29921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.088798 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.111353 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-
apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.115276 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:04 crc kubenswrapper[4756]: E0930 19:32:04.115857 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.116148 4756 scope.go:117] "RemoveContainer" containerID="721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.120624 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.120669 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.120681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.120711 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.120729 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.131050 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.147879 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.166254 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.181342 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.200821 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.224498 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.224545 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.224557 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.224576 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.224590 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.327628 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.327695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.327705 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.327721 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.327731 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.431322 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.431380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.431416 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.431438 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.431452 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.478512 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/1.log" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.482553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.483242 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.505844 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.531213 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.534687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.534774 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.534802 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.534841 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.534868 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.547654 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.561418 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.595261 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.612302 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.637646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.637695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.637708 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.637731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.637744 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.638325 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.660660 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.681843 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.701953 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.715243 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.727333 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.738939 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.739977 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.740037 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.740051 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.740072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.740087 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.751275 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.763387 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.777051 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.787102 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.806534 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.842896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.842941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:04 crc 
kubenswrapper[4756]: I0930 19:32:04.842956 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.842978 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.842992 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.946192 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.946568 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.946579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.946599 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:04 crc kubenswrapper[4756]: I0930 19:32:04.946614 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:04Z","lastTransitionTime":"2025-09-30T19:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.049152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.049195 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.049213 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.049234 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.049252 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.115339 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:05 crc kubenswrapper[4756]: E0930 19:32:05.115525 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.115599 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.115676 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:05 crc kubenswrapper[4756]: E0930 19:32:05.115791 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:05 crc kubenswrapper[4756]: E0930 19:32:05.115920 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.152579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.152623 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.152633 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.152650 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.152663 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.255857 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.255901 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.255909 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.255924 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.255934 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.358940 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.358995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.359012 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.359032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.359047 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.462281 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.462362 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.462424 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.462471 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.462497 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.487571 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/2.log" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.489636 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/1.log" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.493473 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115" exitCode=1 Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.493543 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.493598 4756 scope.go:117] "RemoveContainer" containerID="721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.495046 4756 scope.go:117] "RemoveContainer" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115" Sep 30 19:32:05 crc kubenswrapper[4756]: E0930 19:32:05.495441 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.515430 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.531842 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.552750 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.565246 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.565297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.565310 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.565331 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.565348 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.569090 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.588645 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.602988 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.625577 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.640361 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.668374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.668666 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.668804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.668896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.668958 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.680764 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967
d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://721fa5da1ae99aae66e7486221afabcd9c996982c008c5b00f72787911d29921\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"message\\\":\\\"erver-crc in node crc\\\\nI0930 19:31:44.135110 6217 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135119 6217 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nF0930 19:31:44.135041 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:31:44Z is after 2025-08-24T17:21:41Z]\\\\nI0930 19:31:44.135126 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-m6zsp\\\\nI0930 19:31:44.135121 6217 obj_retry.go:365] Adding new object: *v1.Pod openshift-image\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller 
ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\
":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.703943 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.725816 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.754555 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.772645 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.779014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.779072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.779093 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.779122 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.779147 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.788123 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.800167 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.814626 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b272
04ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.830182 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.844994 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.881737 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.881776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.881786 4756 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.881804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.881816 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.984789 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.984863 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.984891 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.984926 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:05 crc kubenswrapper[4756]: I0930 19:32:05.984951 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:05Z","lastTransitionTime":"2025-09-30T19:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.088704 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.088774 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.088793 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.088822 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.088845 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.115186 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:06 crc kubenswrapper[4756]: E0930 19:32:06.115388 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.191983 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.192039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.192058 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.192087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.192108 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.296616 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.297151 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.297385 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.297586 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.297731 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.401952 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.402483 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.402504 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.402537 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.402558 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.500896 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/2.log" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.505086 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.505145 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.505168 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.505200 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.505218 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.507655 4756 scope.go:117] "RemoveContainer" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115" Sep 30 19:32:06 crc kubenswrapper[4756]: E0930 19:32:06.507855 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.532027 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/m
ultus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.552950 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.572649 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.590740 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.607994 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.608036 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.608049 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.608069 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.608083 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.613934 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.629672 4756 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.649035 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.674374 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.695563 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.710643 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.710943 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.711290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.711468 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.711567 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.713279 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.731530 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.756151 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967
d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.770885 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.782961 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.805844 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.814498 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.814562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.814580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.814608 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.814626 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.827540 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.858094 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.874480 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.918328 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.918698 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.918780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.918854 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:06 crc kubenswrapper[4756]: I0930 19:32:06.918912 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:06Z","lastTransitionTime":"2025-09-30T19:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.022731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.023137 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.023267 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.023490 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.023710 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.116079 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.116123 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.116123 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:07 crc kubenswrapper[4756]: E0930 19:32:07.116302 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:07 crc kubenswrapper[4756]: E0930 19:32:07.116517 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:07 crc kubenswrapper[4756]: E0930 19:32:07.116596 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.126224 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.126277 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.126294 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.126320 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.126341 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.229496 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.229555 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.229567 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.229590 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.229606 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.333566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.333627 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.333644 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.333671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.333690 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.437038 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.437142 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.437162 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.437194 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.437212 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.540710 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.540799 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.540814 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.540840 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.540855 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.644243 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.644304 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.644322 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.644345 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.644360 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.747163 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.747235 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.747252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.747274 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.747298 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.850749 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.850842 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.850867 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.850903 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.850927 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.953708 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.953789 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.953817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.953855 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:07 crc kubenswrapper[4756]: I0930 19:32:07.953878 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:07Z","lastTransitionTime":"2025-09-30T19:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.056369 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.056794 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.056904 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.056994 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.057084 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.067491 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.067554 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.067566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.067580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.067591 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.082043 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.087651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.087784 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.087869 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.087943 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.088009 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.107271 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.112964 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.113221 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.113339 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.113478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.113593 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.115021 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.115226 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.129536 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.133999 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.134040 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.134053 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.134075 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.134088 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.152791 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.156736 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.156880 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.156971 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.157076 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.157159 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.170758 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:08 crc kubenswrapper[4756]: E0930 19:32:08.171024 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.172908 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.172996 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.173069 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.173147 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.173219 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.276013 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.276076 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.276458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.276492 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.276506 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.379637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.379677 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.379686 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.379703 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.379713 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.482871 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.483312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.483466 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.483566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.483657 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.586804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.586872 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.586884 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.586902 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.586914 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.689784 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.689853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.689872 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.689931 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.689950 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.792566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.792800 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.792815 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.792838 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.792852 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.895853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.895905 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.895917 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.895939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.895952 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.998238 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.998280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.998293 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.998313 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:08 crc kubenswrapper[4756]: I0930 19:32:08.998325 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:08Z","lastTransitionTime":"2025-09-30T19:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.101034 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.101086 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.101098 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.101121 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.101135 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.115299 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.115333 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.115365 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:09 crc kubenswrapper[4756]: E0930 19:32:09.115454 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:09 crc kubenswrapper[4756]: E0930 19:32:09.115566 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:09 crc kubenswrapper[4756]: E0930 19:32:09.115737 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.203664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.203724 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.203743 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.203769 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.203788 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.307179 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.307236 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.307253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.307279 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.307300 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.411549 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.411621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.411640 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.411669 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.411691 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.515375 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.515478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.515495 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.515522 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.515543 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.619726 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.619839 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.619858 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.619882 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.619932 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.723475 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.723540 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.723557 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.723581 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.723598 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.826968 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.827044 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.827061 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.827093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.827114 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.930714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.930786 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.930805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.930834 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:09 crc kubenswrapper[4756]: I0930 19:32:09.930859 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:09Z","lastTransitionTime":"2025-09-30T19:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.035175 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.035267 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.035294 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.035334 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.035359 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.115551 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:10 crc kubenswrapper[4756]: E0930 19:32:10.115803 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.139031 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.139093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.139115 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.139141 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.139163 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.242690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.242759 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.242777 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.242801 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.242820 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.346114 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.346191 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.346219 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.346251 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.346274 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.449171 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.449227 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.449240 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.449263 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.449276 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.552510 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.552573 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.552592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.552622 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.552662 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.655277 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.655310 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.655320 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.655337 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.655349 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.758607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.758712 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.758733 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.758762 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.758783 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.862028 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.862119 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.862138 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.862166 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.862184 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.966046 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.966159 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.966187 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.966220 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:10 crc kubenswrapper[4756]: I0930 19:32:10.966247 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:10Z","lastTransitionTime":"2025-09-30T19:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.069603 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.069683 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.069701 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.069732 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.069750 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.115677 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.115762 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:11 crc kubenswrapper[4756]: E0930 19:32:11.115944 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.115806 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:11 crc kubenswrapper[4756]: E0930 19:32:11.116172 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:11 crc kubenswrapper[4756]: E0930 19:32:11.116372 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.135964 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.160859 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.172725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.172782 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.172799 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.172826 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.172844 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.182511 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.205853 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.224313 4756 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.254702 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.274224 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.276288 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.276367 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.276426 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.276471 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.276499 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.296578 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.330712 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.354151 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.375824 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.381158 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.381228 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.381245 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.381274 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 
19:32:11.381295 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.397474 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.433986 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967
d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.451205 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.467982 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.483843 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.483900 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.483917 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.483939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.483954 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.488366 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.502844 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.519637 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.586620 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.586649 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.586657 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.586671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.586681 4756 setters.go:603] "Node became not ready" node="crc" 
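The status-patch failures above share a single root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is presenting a serving certificate whose NotAfter bound (2025-08-24T17:21:41Z) lies behind the node clock (2025-09-30T19:32:11Z), so every TLS handshake fails before any patch is delivered. Below is a minimal Go sketch of the same validity-window check; the certificate path is hypothetical (the webhook container mounts its cert at /etc/webhook-cert/ per the volumeMounts above), and it illustrates only the NotBefore/NotAfter comparison, not full chain verification.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; the webhook's serving cert is mounted under
	// /etc/webhook-cert/ in the network-node-identity pod (see above).
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// The same window comparison the TLS verifier performs; outside it,
	// the handshake fails exactly as logged above.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}
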
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.689523 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.689591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.689628 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.689661 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.689680 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.792676 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.792921 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.792935 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.792953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.792965 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.896149 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.896233 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.896261 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.896297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:11 crc kubenswrapper[4756]: I0930 19:32:11.896323 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:11Z","lastTransitionTime":"2025-09-30T19:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:11.999999 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.000070 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.000095 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.000128 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.000150 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.102702 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.102742 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.102757 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.102776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.102791 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.115373 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:12 crc kubenswrapper[4756]: E0930 19:32:12.115680 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.205467 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.205529 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.205551 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.205578 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.205595 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.309315 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.309381 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.309392 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.309433 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.309448 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
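Every NodeNotReady heartbeat in this stretch carries the same runtime condition: no CNI configuration file in /etc/kubernetes/cni/net.d/. The short, self-contained sketch below approximates that readiness probe by listing the directory for configuration files; it is an illustrative stand-in assuming the conventional .conf/.conflist/.json CNI file extensions, not the actual CRI-O implementation.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("NetworkReady=false: cannot read %s: %v\n", dir, err)
		return
	}
	for _, e := range entries {
		// Conventional CNI config extensions; the first match is enough
		// for the plugin to be considered configured.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found CNI config %s\n", filepath.Join(dir, e.Name()))
			return
		}
	}
	fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", dir)
}

Until the network operator drops a file into that directory, the condition above keeps reasserting itself on every heartbeat, which is what produces the repetition that follows.
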
Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.413082 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.413155 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.413181 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.413216 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.413240 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.516700 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.517148 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.517298 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.517501 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.517730 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.620977 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.621358 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.621562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.621750 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.621904 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.725807 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.725910 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.725935 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.725962 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.725980 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.829385 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.829474 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.829491 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.829517 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.829536 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.933505 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.933563 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.933581 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.933604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:12 crc kubenswrapper[4756]: I0930 19:32:12.933624 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:12Z","lastTransitionTime":"2025-09-30T19:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.037478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.037542 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.037561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.037592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.037612 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.115527 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.115895 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.115541 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:13 crc kubenswrapper[4756]: E0930 19:32:13.115946 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:13 crc kubenswrapper[4756]: E0930 19:32:13.116163 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:13 crc kubenswrapper[4756]: E0930 19:32:13.116191 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.141653 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.141736 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.141943 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.141974 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.141997 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.245047 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.245119 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.245139 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.245169 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.245189 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.347856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.347902 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.347918 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.347939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.347956 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.450952 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.450995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.451005 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.451022 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.451033 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.553884 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.553955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.553983 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.554017 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.554041 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.658504 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.658563 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.658585 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.658614 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.658639 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.762220 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.762275 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.762287 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.762309 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.762323 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.865716 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.865771 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.865782 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.865803 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.865818 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.969319 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.969748 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.969765 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.969791 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:13 crc kubenswrapper[4756]: I0930 19:32:13.969810 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:13Z","lastTransitionTime":"2025-09-30T19:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.073471 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.073535 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.073555 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.073586 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.073605 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.115829 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:14 crc kubenswrapper[4756]: E0930 19:32:14.116027 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.177626 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.177679 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.177699 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.177723 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.177743 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.283355 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.283431 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.283451 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.283476 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.283494 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.386637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.386708 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.386725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.386748 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.386762 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.490604 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.490678 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.490713 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.490733 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.490744 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.593716 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.593769 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.593786 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.593809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.593824 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.697538 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.697582 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.697591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.697607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.697617 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.801161 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.801230 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.801249 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.801280 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.801298 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.938087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.938137 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.938148 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.938169 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:14 crc kubenswrapper[4756]: I0930 19:32:14.938181 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:14Z","lastTransitionTime":"2025-09-30T19:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.045343 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.046539 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.046556 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.046580 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.046592 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:15Z","lastTransitionTime":"2025-09-30T19:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.115379 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.115374 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.115593 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:15 crc kubenswrapper[4756]: E0930 19:32:15.115670 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:15 crc kubenswrapper[4756]: E0930 19:32:15.115816 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:15 crc kubenswrapper[4756]: E0930 19:32:15.115906 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.149169 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.149208 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.149216 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.149234 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.149244 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:15Z","lastTransitionTime":"2025-09-30T19:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.252180 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.252226 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.252236 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.252252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:15 crc kubenswrapper[4756]: I0930 19:32:15.252263 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:15Z","lastTransitionTime":"2025-09-30T19:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
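While that check keeps failing, the kubelet rewrites the node's Ready condition with reason KubeletNotReady on every status sync, which is what the setters.go:603 "Node became not ready" entries record. Below is a dependency-free sketch of that bookkeeping; the types are local stand-ins for the real k8s.io/api ones, so this illustrates the logic rather than kubelet's exact implementation.

// nodecondition.go - stand-in for the Ready-condition setter: status flips
// to False with reason KubeletNotReady, and LastTransitionTime is bumped
// only when the status actually changes.
package main

import (
	"fmt"
	"time"
)

type NodeCondition struct {
	Type               string
	Status             string
	LastHeartbeatTime  time.Time
	LastTransitionTime time.Time
	Reason             string
	Message            string
}

// setReadyCondition updates (or appends) the Ready condition in conds.
func setReadyCondition(conds []NodeCondition, ready bool, msg string, now time.Time) []NodeCondition {
	status, reason := "True", "KubeletReady"
	if !ready {
		status, reason = "False", "KubeletNotReady"
	}
	for i := range conds {
		if conds[i].Type == "Ready" {
			if conds[i].Status != status {
				conds[i].LastTransitionTime = now // transition only on a real flip
			}
			conds[i].Status, conds[i].Reason, conds[i].Message = status, reason, msg
			conds[i].LastHeartbeatTime = now // heartbeat on every sync
			return conds
		}
	}
	return append(conds, NodeCondition{"Ready", status, now, now, reason, msg})
}

func main() {
	msg := "container runtime network not ready: NetworkReady=false"
	conds := setReadyCondition(nil, false, msg, time.Now())
	fmt.Printf("%+v\n", conds[0])
}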
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.078206 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.078268 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.078435 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.078465 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.078481 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:16Z","lastTransitionTime":"2025-09-30T19:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:16 crc kubenswrapper[4756]: I0930 19:32:16.115499 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:16 crc kubenswrapper[4756]: E0930 19:32:16.115686 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.008608 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.008671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.008685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.008706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.008723 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:17Z","lastTransitionTime":"2025-09-30T19:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.115725 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.115740 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:17 crc kubenswrapper[4756]: E0930 19:32:17.115859 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:17 crc kubenswrapper[4756]: I0930 19:32:17.115966 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:17 crc kubenswrapper[4756]: E0930 19:32:17.116115 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:17 crc kubenswrapper[4756]: E0930 19:32:17.116241 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
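The paired util.go:30 "No sandbox for pod can be found" and pod_workers.go:1301 "Error syncing pod, skipping" entries show the per-pod side of the same condition: sandbox creation for cluster-networked pods is refused until NetworkReady flips to true, and each pod is simply retried on a later sync. A sketch of that gate, with illustrative types and names rather than kubelet's actual API:

// podnetgate.go - the sync-time check behind "Error syncing pod, skipping":
// pods that need the cluster network are rejected while the CNI is
// unavailable; host-network pods would be exempt.
package main

import (
	"errors"
	"fmt"
)

type Pod struct {
	Namespace, Name string
	HostNetwork     bool
}

var errNetworkNotReady = errors.New("network is not ready: container runtime network not ready: NetworkReady=false")

// syncPod refuses to start a sandbox for a cluster-networked pod while the
// network is down, mirroring the skip-and-retry behavior in the log.
func syncPod(p Pod, networkReady bool) error {
	if !networkReady && !p.HostNetwork {
		return errNetworkNotReady
	}
	fmt.Printf("starting sandbox for %s/%s\n", p.Namespace, p.Name)
	return nil
}

func main() {
	p := Pod{Namespace: "openshift-network-diagnostics", Name: "network-check-target-xd92c"}
	if err := syncPod(p, false); err != nil {
		fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.Namespace+"/"+p.Name)
	}
}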
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.050594 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.050657 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.050677 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.050705 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.050727 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.070786 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.070990 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.071104 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:32:50.071076529 +0000 UTC m=+99.692010036 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.115969 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.116231 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.213507 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:18Z is after 2025-08-24T17:21:41Z"
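The status patch itself is rejected for a reason unrelated to CNI: the node.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24, so the TLS handshake to https://127.0.0.1:9743 fails. The sketch below reproduces the validity-window check that yields "x509: certificate has expired or is not yet valid"; the PEM path is a placeholder, not a path from this system.

// certexpiry.go - the NotBefore/NotAfter check Go's TLS stack applies to a
// peer certificate, shown standalone against a PEM file on disk.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func checkValidity(pemPath string, now time.Time) error {
	data, err := os.ReadFile(pemPath)
	if err != nil {
		return err
	}
	block, _ := pem.Decode(data)
	if block == nil {
		return fmt.Errorf("no PEM block in %s", pemPath)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is outside [%s, %s]",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	// Placeholder path; point this at the webhook's serving certificate to
	// reproduce the check against a real file.
	if err := checkValidity("/tmp/webhook-serving.crt", time.Now()); err != nil {
		fmt.Println("tls: failed to verify certificate:", err)
	}
}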
event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.218860 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.218879 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.218891 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.239272 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.244274 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.244353 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.244374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.244440 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.244463 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.265383 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.271288 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.271380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.271456 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.271495 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.271522 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.292105 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.296421 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.296456 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.296467 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.296487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.296501 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.313287 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:18 crc kubenswrapper[4756]: E0930 19:32:18.313436 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.315434 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.315481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.315494 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.315514 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.315529 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.419215 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.419261 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.419272 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.419292 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.419306 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.521255 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.521289 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.521297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.521312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.521322 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.625079 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.625155 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.625169 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.625192 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.625210 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.728414 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.728487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.728508 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.728537 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.728556 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.831472 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.831526 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.831542 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.831560 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.831571 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.934832 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.934885 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.934896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.934918 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:18 crc kubenswrapper[4756]: I0930 19:32:18.934930 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:18Z","lastTransitionTime":"2025-09-30T19:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.038534 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.038621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.038640 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.038668 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.038688 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.116020 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.116132 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:19 crc kubenswrapper[4756]: E0930 19:32:19.116201 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.116318 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:19 crc kubenswrapper[4756]: E0930 19:32:19.116555 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:19 crc kubenswrapper[4756]: E0930 19:32:19.116764 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.117492 4756 scope.go:117] "RemoveContainer" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115"
Sep 30 19:32:19 crc kubenswrapper[4756]: E0930 19:32:19.117716 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.140734 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.140817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.140836 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.140867 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.140888 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.243939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.244025 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.244043 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.244069 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.244088 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.349303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.349370 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.349388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.349464 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.349483 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.452532 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.452616 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.452637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.452668 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.452695 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.554898 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.554962 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.554980 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.555007 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.555028 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.657725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.657794 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.657809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.657836 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.657853 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.760636 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.760691 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.760702 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.760723 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.760735 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.863478 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.863528 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.863539 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.863559 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.863570 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.966506 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.966598 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.966621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.966651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:19 crc kubenswrapper[4756]: I0930 19:32:19.966672 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:19Z","lastTransitionTime":"2025-09-30T19:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.069853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.069919 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.069939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.069967 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.069987 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.115492 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:20 crc kubenswrapper[4756]: E0930 19:32:20.115687 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.173877 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.173934 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.173947 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.173968 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.173982 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.276561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.276632 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.276651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.276683 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.276705 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.379485 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.379541 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.379553 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.379573 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.379588 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.482425 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.482482 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.482493 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.482514 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.482529 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.557340 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/0.log"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.557411 4756 generic.go:334] "Generic (PLEG): container finished" podID="104562fb-2a2d-4291-963c-7a95062ec13a" containerID="c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23" exitCode=1
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.557456 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerDied","Data":"c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23"}
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.557918 4756 scope.go:117] "RemoveContainer" containerID="c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23"
Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.570003 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has
expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.583622 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b338
2af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.585719 4756 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.585747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.585756 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.585773 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.585824 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.594814 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.605624 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.616957 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.630922 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.642739 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.658220 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.675775 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
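
[note] The "back-off 20s restarting failed container=ovnkube-controller" waiting state in the entry above reflects the kubelet's per-container restart backoff. A minimal sketch of that schedule, assuming the upstream kubelet defaults (10s base, doubling per consecutive crash, capped at 5 minutes) — the defaults are an assumption here; the log itself only shows the 20s step:

    def restart_backoff(consecutive_failures: int, base: int = 10, cap: int = 300) -> int:
        """Seconds the kubelet waits before the next restart attempt (assumed defaults)."""
        return min(base * (2 ** consecutive_failures), cap)

    # 10s, 20s, 40s, 80s, 160s, 300s, 300s, ...
    print([f"{restart_backoff(n)}s" for n in range(7)])

The 20s delay shown with restartCount 2 is consistent with the second step of such a doubling schedule.
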
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.688294 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.688384 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.688416 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.688441 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.688453 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
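
[note] Every "Failed to update status for pod" entry here ends in the same root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate whose notAfter (2025-08-24T17:21:41Z) is in the past. A minimal diagnostic sketch that reproduces the check, assuming the third-party "cryptography" package is available; host and port are taken from the log:

    import ssl
    from datetime import datetime, timezone
    from cryptography import x509

    HOST, PORT = "127.0.0.1", 9743  # https://127.0.0.1:9743/pod from the log

    # Fetch the serving cert without chain verification (we only want to read it).
    pem = ssl.get_server_certificate((HOST, PORT))
    cert = x509.load_pem_x509_certificate(pem.encode())

    now = datetime.now(timezone.utc)
    not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)
    print("notAfter:", not_after.isoformat())
    if now > not_after:
        # Same failure mode as the log: "certificate has expired or is not yet
        # valid: current time ... is after ..."
        print(f"expired: current time {now.isoformat()} is after {not_after.isoformat()}")
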
Has your network provider started?"} Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.690190 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.702586 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.725379 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.748443 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.760640 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 
shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.771762 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.783645 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod 
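
[note] The kube-multus termination message above ("still waiting for readinessindicatorfile ... pollimmediate error: timed out waiting for the condition") describes a poll-until-file-exists loop that gave up. A minimal sketch of that behavior; the path is copied from the log, while the interval and timeout are illustrative (the entry shows roughly 45 seconds between daemon start and the timeout):

    import os
    import time

    def wait_for_file(path: str, interval: float = 1.0, timeout: float = 45.0) -> bool:
        """Poll until `path` exists or the timeout elapses."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if os.path.exists(path):
                return True
            time.sleep(interval)
        return False

    if not wait_for_file("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"):
        print("timed out waiting for the condition")
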
\"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.790829 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.790875 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.790886 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.790905 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.790916 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.794668 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.803292 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:20Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.893436 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:20 
crc kubenswrapper[4756]: I0930 19:32:20.893494 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.893504 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.893523 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.893534 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.995931 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.995966 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.995976 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.995991 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:20 crc kubenswrapper[4756]: I0930 19:32:20.996004 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:20Z","lastTransitionTime":"2025-09-30T19:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.098740 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.098781 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.098791 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.098809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.098819 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.115570 4756 util.go:30] "No sandbox for pod can be found. 
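
[note] The repeating NodeNotReady condition above ("no CNI configuration file in /etc/kubernetes/cni/net.d/") amounts to a check that the CNI conf directory contains at least one config file. A sketch of that check; the directory is copied from the log, and the .conf/.conflist/.json extensions are an assumption about what the runtime scans, not something the log states:

    from pathlib import Path

    CNI_CONF_DIR = Path("/etc/kubernetes/cni/net.d")  # directory named in the log

    confs = []
    if CNI_CONF_DIR.is_dir():
        confs = sorted(p.name for p in CNI_CONF_DIR.iterdir()
                       if p.suffix in {".conf", ".conflist", ".json"})
    print("NetworkReady:", bool(confs), confs)
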
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:21 crc kubenswrapper[4756]: E0930 19:32:21.115695 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.115956 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:21 crc kubenswrapper[4756]: E0930 19:32:21.116006 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.116142 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:21 crc kubenswrapper[4756]: E0930 19:32:21.116191 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.133061 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.146922 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.158105 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.188044 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.198982 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.201263 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.201441 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.201559 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.201675 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.201784 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.211650 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.222179 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.240572 4756 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b272
04ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.252965 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.265450 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.281937 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.293704 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.304533 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.304596 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.304613 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.304638 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.304656 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.305841 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.319305 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.330765 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.340156 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.352409 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.365288 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.407264 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.407318 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.407329 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.407350 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.407362 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.509952 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.509997 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.510011 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.510032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.510047 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.564839 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/0.log" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.564970 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerStarted","Data":"3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.584159 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.597022 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613729 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613770 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613785 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613805 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613818 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.613979 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.628955 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.647142 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.663935 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.678078 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.696685 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.708998 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.715988 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.716039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.716052 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.716071 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.716085 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.723210 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath
\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.738498 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.768888 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bb
d0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.782384 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.794244 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.806043 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.818744 4756 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.818787 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.818800 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.818823 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.818838 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.827747 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967
d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.837948 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.848296 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:21Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.922103 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.922148 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.922157 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.922178 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:21 crc kubenswrapper[4756]: I0930 19:32:21.922190 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:21Z","lastTransitionTime":"2025-09-30T19:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.024533 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.024579 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.024592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.024613 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.024627 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.115558 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:22 crc kubenswrapper[4756]: E0930 19:32:22.115903 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.127551 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.127593 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.127605 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.127624 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.127637 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.230553 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.230645 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.230660 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.230681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.230696 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.333447 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.333487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.333501 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.333520 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.333532 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.436567 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.436643 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.436667 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.436698 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.436721 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.538792 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.538834 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.538847 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.538866 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.538877 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.641687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.641739 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.641753 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.641773 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.641787 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.744797 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.744856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.744875 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.744899 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.744915 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.847582 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.847613 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.847621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.847636 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.847648 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.949717 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.949752 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.949760 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.949776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:22 crc kubenswrapper[4756]: I0930 19:32:22.949786 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:22Z","lastTransitionTime":"2025-09-30T19:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.051615 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.051645 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.051654 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.051668 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.051678 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.117554 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:23 crc kubenswrapper[4756]: E0930 19:32:23.117694 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.117763 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:23 crc kubenswrapper[4756]: E0930 19:32:23.117820 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.117867 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:23 crc kubenswrapper[4756]: E0930 19:32:23.117916 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.153156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.153205 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.153218 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.153233 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.153587 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.261684 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.261769 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.261785 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.261831 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.261850 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.364349 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.364420 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.364437 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.364458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.364469 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.468198 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.468678 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.468717 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.468748 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.468768 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.571955 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.572022 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.572043 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.572071 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.572093 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.675492 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.675549 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.675564 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.675584 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.675598 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.778418 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.779356 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.779546 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.779667 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.779777 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.883694 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.883790 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.883817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.883858 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.883883 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.987656 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.987726 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.987743 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.987772 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:23 crc kubenswrapper[4756]: I0930 19:32:23.987791 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:23Z","lastTransitionTime":"2025-09-30T19:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.090899 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.090974 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.090995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.091023 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.091042 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.115654 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:24 crc kubenswrapper[4756]: E0930 19:32:24.115861 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.194297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.194357 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.194377 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.194434 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.194495 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.298335 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.298452 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.298472 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.298506 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.298530 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.402728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.402801 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.402819 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.402850 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.402870 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.506863 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.506939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.506958 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.506988 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.507009 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.610154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.610417 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.610436 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.610464 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.610512 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.713646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.713702 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.713752 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.713779 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.713796 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.816712 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.816789 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.816807 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.816834 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.816855 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.920481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.920571 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.920591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.920621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:24 crc kubenswrapper[4756]: I0930 19:32:24.920641 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:24Z","lastTransitionTime":"2025-09-30T19:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.023206 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.023287 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.023306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.023333 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.023351 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.115824 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.115890 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:25 crc kubenswrapper[4756]: E0930 19:32:25.116092 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.116213 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:25 crc kubenswrapper[4756]: E0930 19:32:25.116440 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:25 crc kubenswrapper[4756]: E0930 19:32:25.116598 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.126646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.126695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.126714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.126735 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.126750 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.230312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.230388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.230442 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.230474 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.230497 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.334602 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.334668 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.334685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.334714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.334738 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.438872 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.438930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.438942 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.438962 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.438979 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.543095 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.543160 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.543178 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.543204 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.543224 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.646881 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.646944 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.646962 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.646991 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.647011 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.750334 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.750446 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.750475 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.750509 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.750530 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.854629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.854727 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.854744 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.854774 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.854796 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.958681 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.958747 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.958765 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.958793 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:25 crc kubenswrapper[4756]: I0930 19:32:25.958812 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:25Z","lastTransitionTime":"2025-09-30T19:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.062589 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.062652 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.062664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.062688 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.062704 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.115331 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:26 crc kubenswrapper[4756]: E0930 19:32:26.115580 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.174561 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.174627 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.174647 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.174675 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.174697 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.277764 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.277837 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.277850 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.277875 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.277892 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.381788 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.381861 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.381881 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.381912 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.381933 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.485565 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.485668 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.485695 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.485731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.485758 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.589336 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.589524 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.589541 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.589607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.589630 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.693529 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.693589 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.693609 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.693636 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.693657 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.796931 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.797014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.797032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.797065 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.797090 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.900876 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.900954 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.900978 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.901009 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:26 crc kubenswrapper[4756]: I0930 19:32:26.901029 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:26Z","lastTransitionTime":"2025-09-30T19:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.004350 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.004451 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.004469 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.004496 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.004514 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.108766 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.108827 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.108846 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.108879 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.108904 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.115312 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.115356 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:27 crc kubenswrapper[4756]: E0930 19:32:27.115513 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.115513 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:27 crc kubenswrapper[4756]: E0930 19:32:27.115768 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:27 crc kubenswrapper[4756]: E0930 19:32:27.115906 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.211830 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.211914 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.211941 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.211974 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.211996 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
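With the same three pods failing to sync every second or so, a tally is more useful than reading the stream. A small sketch (Python 3 reading the log on stdin; the pod="..." attribute pattern is copied from the entries above, and the script name is mine) that counts "Error syncing pod" occurrences per pod:

#!/usr/bin/env python3
# Sketch: tally "Error syncing pod" entries by pod, reading a kubelet
# log on stdin, e.g.:  python3 tally.py < kubelet.log
# findall per line copes with the wrapped capture, where several
# journal entries can share one physical line.
import re
import sys
from collections import Counter

pat = re.compile(r'"Error syncing pod, skipping".*?pod="([^"]+)"')
counts = Counter()
for line in sys.stdin:
    for pod in pat.findall(line):
        counts[pod] += 1

for pod, n in counts.most_common():
    print(f"{n:6d}  {pod}")

On this capture the top offenders would be networking-console-plugin-85b44fc459-gdk6g, network-check-source-55646444c4-trplf, network-check-target-xd92c, and network-metrics-daemon-b4n45, all blocked on the same missing CNI configuration.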
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.315777 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.315862 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.315887 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.315918 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.315946 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.419086 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.419172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.419188 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.419209 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.419221 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.522851 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.522940 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.522968 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.523005 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.523063 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.625987 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.626098 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.626119 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.626150 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.626172 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.729038 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.729136 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.729169 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.729211 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.729236 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.834041 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.834111 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.834129 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.834157 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.834179 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.937612 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.937676 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.937694 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.937722 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:27 crc kubenswrapper[4756]: I0930 19:32:27.937743 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:27Z","lastTransitionTime":"2025-09-30T19:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.041645 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.041712 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.041731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.041759 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.041776 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.115235 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.115533 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.145361 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.145460 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.145625 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.145689 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.146085 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.249163 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.249231 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.249254 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.249283 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.249303 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.352957 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.353068 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.353089 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.353121 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.353141 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.370574 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.370651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.370680 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.370713 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.370737 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.394996 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:28Z is after 
2025-08-24T17:21:41Z" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.401932 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.401997 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.402021 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.402053 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.402078 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.423595 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:28Z is after 
2025-08-24T17:21:41Z" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.430627 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.430811 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.430866 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.430893 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.430913 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.456737 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:28Z is after 
2025-08-24T17:21:41Z" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.463054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.463111 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.463124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.463147 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.463163 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.486862 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:28Z is after 
2025-08-24T17:21:41Z" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.492926 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.493066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.493160 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.493185 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.493199 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.517315 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:28Z is after 
2025-08-24T17:21:41Z" Sep 30 19:32:28 crc kubenswrapper[4756]: E0930 19:32:28.517701 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.520538 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.520582 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.520609 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.520634 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.520652 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.624480 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.624584 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.624602 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.624684 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.624707 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.728239 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.728296 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.728312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.728337 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.728356 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.831081 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.831137 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.831157 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.831189 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.831207 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.934877 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.934946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.934964 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.934992 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:28 crc kubenswrapper[4756]: I0930 19:32:28.935010 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:28Z","lastTransitionTime":"2025-09-30T19:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.037539 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.037674 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.037706 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.037741 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.037764 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.115716 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.115834 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:29 crc kubenswrapper[4756]: E0930 19:32:29.115944 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.116121 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:29 crc kubenswrapper[4756]: E0930 19:32:29.116259 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:29 crc kubenswrapper[4756]: E0930 19:32:29.116389 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.135642 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.140761 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.140815 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.140829 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.140853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.140869 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.245038 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.245104 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.245122 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.245152 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.245171 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.349144 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.349301 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.349321 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.349348 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.349366 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.453367 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.453476 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.453497 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.453525 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.453543 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.557514 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.557629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.557647 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.557678 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.557696 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.661367 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.661466 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.661485 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.661512 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.661532 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.765443 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.765528 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.765552 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.765588 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.765612 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.869935 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.870017 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.870034 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.870063 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.870083 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.973185 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.973266 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.973291 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.973322 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:29 crc kubenswrapper[4756]: I0930 19:32:29.973346 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:29Z","lastTransitionTime":"2025-09-30T19:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.076896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.076967 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.076979 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.077001 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.077014 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.115642 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:30 crc kubenswrapper[4756]: E0930 19:32:30.115856 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.179889 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.179948 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.179967 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.179990 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.180009 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.283470 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.283622 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.283645 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.283715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.283741 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.387192 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.387250 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.387263 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.387284 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.387293 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.490651 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.490715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.490733 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.490759 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.490782 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.593888 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.593949 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.593969 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.593998 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.594018 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.696928 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.697000 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.697017 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.697046 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.697071 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.801024 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.801155 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.801174 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.801197 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.801236 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.905057 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.905124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.905143 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.905172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:30 crc kubenswrapper[4756]: I0930 19:32:30.905191 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:30Z","lastTransitionTime":"2025-09-30T19:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.008954 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.009029 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.009050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.009080 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.009099 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.112541 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.112625 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.112639 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.112684 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.112704 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.115052 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.115091 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.115049 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:31 crc kubenswrapper[4756]: E0930 19:32:31.115273 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:31 crc kubenswrapper[4756]: E0930 19:32:31.115456 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:31 crc kubenswrapper[4756]: E0930 19:32:31.115610 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.141295 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.161388 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.183568 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.207689 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.217147 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.217181 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc 
kubenswrapper[4756]: I0930 19:32:31.217195 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.217218 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.217233 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.226118 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:3
1:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.248071 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.267724 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.283949 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.308715 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.321215 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.321263 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.321460 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.321538 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.321563 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.328533 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.359301 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.373807 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.393087 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.414720 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"86ed586e-a84e-413c-bfb3-4e25a9393c3a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38fdd34c9534d8b27068f856bae35d51ce2f284f03aa5176b20e82f0dfe82abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.425791 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.425888 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.425915 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.425952 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.425983 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.446616 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.468712 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.494323 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.517590 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.528641 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.528685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.528698 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.528716 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.528728 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.538889 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.632577 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 
19:32:31.632665 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.632685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.632715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.632741 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.736360 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.736519 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.736544 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.736572 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.736594 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.840123 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.840206 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.840226 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.840254 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.840277 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.943978 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.944052 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.944071 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.944104 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:31 crc kubenswrapper[4756]: I0930 19:32:31.944122 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:31Z","lastTransitionTime":"2025-09-30T19:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.048156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.048230 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.048253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.048283 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.048303 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.115940 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:32 crc kubenswrapper[4756]: E0930 19:32:32.116176 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.117331 4756 scope.go:117] "RemoveContainer" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.151268 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.151326 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.151343 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.151368 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.151387 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.254662 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.254719 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.254732 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.254780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.254792 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.359671 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.360132 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.360151 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.360179 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.360199 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.464242 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.464311 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.464332 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.464365 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.464387 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.568467 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.568518 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.568531 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.568548 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.568558 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.611197 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/2.log" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.615220 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.615766 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.631581 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.656553 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://802780f678d2e964061eb22cc909146073c02612
4ad07406cf1e046c73c442a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.671743 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.672286 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.672316 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.672325 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.672343 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.672355 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.685358 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.704080 4756 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86ed586e-a84e-413c-bfb3-4e25a9393c3a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38fdd34c9534d8b27068f856bae35d51ce2f284f03aa5176b20e82f0dfe82abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.740554 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.766691 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.776444 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.776514 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.776539 4756 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.776572 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.776593 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.794868 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.819436 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.851422 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.877282 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.880102 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.880153 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.880165 4756 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.880188 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.880202 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.902029 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.920337 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.951984 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.971589 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.982833 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.982891 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.982907 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.982930 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.982949 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:32Z","lastTransitionTime":"2025-09-30T19:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:32 crc kubenswrapper[4756]: I0930 19:32:32.987899 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.008703 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.029207 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.048512 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.086117 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.086175 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc 
kubenswrapper[4756]: I0930 19:32:33.086193 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.086217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.086233 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.116020 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.116039 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.116016 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:33 crc kubenswrapper[4756]: E0930 19:32:33.116171 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:33 crc kubenswrapper[4756]: E0930 19:32:33.116327 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:33 crc kubenswrapper[4756]: E0930 19:32:33.116418 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.190096 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.190145 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.190154 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.190176 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.190188 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.295103 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.295204 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.295229 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.295297 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.295327 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.398715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.398766 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.398780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.398804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.398820 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.502521 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.502631 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.502659 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.502694 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.502718 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.605018 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.605075 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.605093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.605118 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.605138 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.620863 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.621807 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/2.log" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.625921 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" exitCode=1 Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.625982 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.626038 4756 scope.go:117] "RemoveContainer" containerID="539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.626698 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:32:33 crc kubenswrapper[4756]: E0930 19:32:33.626905 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.649045 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.669527 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.705508 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://802780f678d2e964061eb22cc909146073c02612
4ad07406cf1e046c73c442a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://539ad7633e1076358f49a836687191a85b8b2967d0dd7c7e6591ec78416e1115\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:05Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0930 19:32:05.081516 6485 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0930 19:32:05.081551 6485 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0930 19:32:05.081577 6485 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0930 19:32:05.081647 6485 factory.go:1336] Added *v1.Node event handler 7\\\\nI0930 19:32:05.081686 6485 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0930 19:32:05.082052 6485 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0930 19:32:05.082149 6485 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0930 19:32:05.082187 6485 ovnkube.go:599] Stopped ovnkube\\\\nI0930 19:32:05.082214 6485 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 19:32:05.082302 6485 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:33Z\\\",\\\"message\\\":\\\"services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, 
AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0930 19:32:33.308634 6847 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.708372 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.708428 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.708443 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.708465 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.708481 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.721806 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.746544 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.761237 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"86ed586e-a84e-413c-bfb3-4e25a9393c3a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38fdd34c9534d8b27068f856bae35d51ce2f284f03aa5176b20e82f0dfe82abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.796644 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a
0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.811783 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.811875 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.811894 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.811922 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.811940 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.818865 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.843350 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.866081 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.889615 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.908316 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.914966 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.915008 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.915025 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.915047 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.915064 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:33Z","lastTransitionTime":"2025-09-30T19:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.967070 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:33 crc kubenswrapper[4756]: I0930 19:32:33.991511 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.008545 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.018032 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.018072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc 
kubenswrapper[4756]: I0930 19:32:34.018087 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.018108 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.018122 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.023380 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:3
1:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.039172 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.066752 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.080771 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.115749 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:34 crc kubenswrapper[4756]: E0930 19:32:34.115957 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.121482 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.121545 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.121564 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.121592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.121614 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.224562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.224637 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.224655 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.224685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.224703 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.328043 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.328116 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.328134 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.328162 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.328186 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.430964 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.431014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.431031 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.431054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.431072 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.534820 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.534904 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.534924 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.534954 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.534973 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.632955 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.637689 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.637891 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.637987 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.638076 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.638108 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.639475 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:32:34 crc kubenswrapper[4756]: E0930 19:32:34.639801 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.666451 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dfdb5d6-9311-4699-a1ac-5f1a6cde2441\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a78698331fc3db3b12b06c8816256c5d055d1decfd12cc0099d6cfd155d673d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb926334772dc454adcdabd6070f751ef1aded82a5c13732875e1ec04d2c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6195128a0e66f0f932a444f4717084f00ea05ff648f7c26364bb9c96483b652a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://746e37834d01867d327b2cb407df8e9a8489f58884a28f1da6a336f74e4d1033\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.687423 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c7764cf2-26f0-4b53-85b6-2b07668edda7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cbb0f481cae6dbc480af93bb03f4b4e597381979fed1a9b91fb23aef7868b65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2adbf3b2ade3e39ca3d2f7a70212537c9e7531907dd4a1e7748220183d34b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b904814bd38b98f875532fedfa07321ce8d149f697e027ad926f8f40ddce61e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f9299d12673c7a8e182e0d4354088d1aa539ca35a788ba6ff507d2ca96cff07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.706741 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a5ef539293ea3ff94f53b11a0510bb8bb45c62c945cffab746512efeeb19a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pstrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4n9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.742872 4756 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.742960 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.742970 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.742992 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.743006 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.743731 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d96acc31-a519-46fc-94d8-f19522e77391\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://802780f678d2e964061eb22cc909146073c02612
4ad07406cf1e046c73c442a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:33Z\\\",\\\"message\\\":\\\"services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0930 19:32:33.308634 6847 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:32:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5bwg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kmxk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.763580 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8nj5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bb1d9a-1ad2-406d-afc2-c4cd283fe1f4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28f7474a36dde55478614d47fe30f93b44cb549a25e148e295987e64158fc3c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l6nxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8nj5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.784784 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b4n45" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c151bbd-9757-44ba-aa11-ff679e841fdc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fsl2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-b4n45\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.805130 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86ed586e-a84e-413c-bfb3-4e25a9393c3a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38fdd34c9534d8b27068f856bae35d51ce2f284f03aa5176b20e82f0dfe82abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4346c24591ed37e8c2cb6d5693d16c1c3d54d3239edd35f193e2e030e8bfa221\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.842830 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6aa869dd-ec28-4032-82be-c656f27bd7b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99767cc72b9d6057d38c45fbec6231bafa47ccffefb78e5924b5e9c1e797722b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b852143b26114c05edce20729cf706c5394609a74f522b7ecf44d92f9d66aed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ee47cdb77e416535bd00569159712241bbd0952651774af6440fd04000d1722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cca81a0aeb985d6044f19cccc580ca5c78553a0abb60fb2a2bed955233f7a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://621b37e117d0eeef289e110d75521c3262ced12144d665be35d6823780b4bae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5550fd486f7fad8080fb3eed0f4c73140172c68983fa8a423cd48da8f31ef3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390ef7ee5c5d98dc226f1d061c6c9bb83236c988f2af15635e7834f643862840\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f98786c914dcdd3ca1cde8421686a1d536abb97c375f26f458d8f514bdccd52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.846237 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.846287 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.846300 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.846322 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.846346 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.861898 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://449a4827a34c17e8cdc851d2714ced931581b5c3a1bcd2f7f058b95d68403740\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163fab064016913eb90cd87fe0da1c702eeb7f000c92b108bfb24a21ef0e98d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.881237 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218f95d8-7230-4b64-83a5-00af4f5ec7dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a76d38dbf936ca79e57470de2c8bf98e8681f7d94f2c572aa5c0d2b2acbb484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f786181b4762f1e04a48ac6e352090b62e1691091ddc390821abb693e8f7a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c275af67ad8675a437b3f7fae34236bbdfbdf1f216be6a442d6765471d1d11e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7673b8a2ea701b95437a1ad447661ea3140ce0b27204ab946f39dbf0a25e29b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2e0caca690592c9bd17dae14f79390b94f47507afb05a1c1732453700b99786\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"g.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245412 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 19:31:31.245417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 19:31:31.245423 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 19:31:31.245426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 19:31:31.245428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 19:31:31.245860 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0930 19:31:31.249125 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249169 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:31:31.249201 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249211 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:31:31.249225 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:31:31.249238 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:31:31.250125 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0930 19:31:31.250150 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF0930 19:31:31.250576 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01164eb1afdb771ef4d2dc268107dadd206959e1ef66b154caf3ff18717e20de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7365492f5fe72c39f569bfa6e2b1a44236da10b4687a738b97ce95626081c1b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.897174 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.911597 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l9mw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"104562fb-2a2d-4291-963c-7a95062ec13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:32:19Z\\\",\\\"message\\\":\\\"2025-09-30T19:31:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3\\\\n2025-09-30T19:31:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_84cba474-668f-4940-86a3-7be18c7a30f3 to /host/opt/cni/bin/\\\\n2025-09-30T19:31:34Z [verbose] multus-daemon started\\\\n2025-09-30T19:31:34Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:32:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mhq89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l9mw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.931707 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.949843 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.949911 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.949935 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.949967 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.949993 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:34Z","lastTransitionTime":"2025-09-30T19:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.951533 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37999f5cb36e7071b109ed90026aea745a55dbdd26d53799ee4196dc6a983c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.971190 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ghp5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32ba8a0a-215c-415b-a893-74f7a40e3c20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96a8eea15e5d12ad93cdb41ef3d1054fae455aea2878451bc5715de6d5b612f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pslwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ghp5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:34 crc kubenswrapper[4756]: I0930 19:32:34.992514 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b4cba15-5ae1-4c98-a5e5-060b3af3aec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13307050242b53fcc9d2c9ab6870c36d5c2c1e43adfa36c1e76addf32166b95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://828a01ba594287ceea1e72baa5419d522ce14d2ab3df5a69fb6332400c3e4abc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f91b3382af5267116de235e9d5f480e574b479350e738621b5e7568329dd879\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677982485b1155ded1bce2a0f5b3262766d498642307d1744d5cbae6c66102e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14b8b318d4cd0da2272b87ac74a4dccdf29874a818aadc0698e9d85a75a169da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e877edd834105d5bd0dcd24e6ba9c11ead17ab39a95ca0169c54acaad7544ebe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decf531f70c78d1d142bef3d265612ebac2c8790ea10aba75def6d785fba84a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:31:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:31:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jvl9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-m6zsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:34Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.010105 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae264bc-c901-4628-89f4-53e94403dff8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d059c5c1b5cded992e4971b04737c4f528ed04278d98651c01c8b8a168fe5655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764e5cdcf5a9c07e5d046217ffd9db27e2b09a827d02f165459deb24d99f141c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc8dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:31:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gld2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:35Z is after 2025-08-24T17:21:41Z" Sep 30 
19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.029450 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.048592 4756 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:31:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d39cfd1b725ded02a27e8f7bed73221dd180e3fffb7f06d1877d1bc2f6910334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:31:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.053853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.053927 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.053948 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.053976 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.053995 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.083530 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.083705 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.083795 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.083834 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.083883 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084100 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084129 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084150 4756 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084233 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.084206314 +0000 UTC m=+148.705139831 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084585 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.084570004 +0000 UTC m=+148.705503521 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084676 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084694 4756 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084710 4756 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084752 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.084739258 +0000 UTC m=+148.705672765 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.084957 4756 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.085000 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.084987685 +0000 UTC m=+148.705921192 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.085321 4756 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.085467 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.085435666 +0000 UTC m=+148.706369183 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.116711 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.116757 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.116942 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.117030 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.117245 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:35 crc kubenswrapper[4756]: E0930 19:32:35.117529 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.157013 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.157054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.157081 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.157102 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.157113 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.261499 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.261576 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.261595 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.261627 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.261648 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.365748 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.365816 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.365835 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.365867 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.365889 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.469383 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.469517 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.469537 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.469566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.469587 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.576369 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.577050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.577075 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.577172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.577254 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.680345 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.680462 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.680490 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.680526 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.680551 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.783876 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.783969 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.784001 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.784038 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.784063 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.888022 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.888107 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.888127 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.888158 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.888176 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.991181 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.991253 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.991276 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.991306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:35 crc kubenswrapper[4756]: I0930 19:32:35.991329 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:35Z","lastTransitionTime":"2025-09-30T19:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.094846 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.094923 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.094947 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.094976 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.095002 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.116794 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:36 crc kubenswrapper[4756]: E0930 19:32:36.117076 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.198501 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.198552 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.198569 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.198592 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.198610 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.301235 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.301570 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.301776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.302146 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.302505 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.405352 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.405441 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.405454 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.405480 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.405500 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.508873 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.508946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.508971 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.509003 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.509030 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.612386 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.612497 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.612518 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.612558 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.612579 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.716636 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.717065 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.717260 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.717432 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.717559 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.821821 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.822241 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.822334 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.822424 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.822509 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.926241 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.926303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.926323 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.926351 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:36 crc kubenswrapper[4756]: I0930 19:32:36.926379 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:36Z","lastTransitionTime":"2025-09-30T19:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.030739 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.030838 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.030865 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.030903 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.030931 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.116059 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.116146 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.116090 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:37 crc kubenswrapper[4756]: E0930 19:32:37.116298 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:37 crc kubenswrapper[4756]: E0930 19:32:37.116597 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:37 crc kubenswrapper[4756]: E0930 19:32:37.116734 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.134288 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.134336 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.134348 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.134368 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.134384 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.239845 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.239898 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.239916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.239944 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.239970 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.343842 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.343924 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.343946 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.343978 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.344001 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.447691 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.447779 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.447802 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.447835 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.447858 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.551026 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.551104 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.551128 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.551160 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.551184 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.653139 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.653678 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.653856 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.654038 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.654252 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.757777 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.757836 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.757853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.757880 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.757899 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.861304 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.861379 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.861424 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.861451 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.861470 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.964638 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.964984 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.965157 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.965345 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:37 crc kubenswrapper[4756]: I0930 19:32:37.965578 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:37Z","lastTransitionTime":"2025-09-30T19:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.068926 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.069621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.069676 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.069718 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.069749 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.115671 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.115932 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.173569 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.173648 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.173669 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.173702 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.173722 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.283107 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.283191 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.283210 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.283248 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.283270 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.386510 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.386613 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.386633 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.386664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.386686 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.490151 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.490217 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.490234 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.490256 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.490272 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.593252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.593334 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.593352 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.593386 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.593433 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.696248 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.696312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.696325 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.696352 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.696371 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.741360 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.741416 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.741426 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.741447 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.741459 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.759041 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.763909 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.763970 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.763990 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.764018 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.764042 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.778016 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.783030 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.783111 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.783128 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.783183 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.783203 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.797381 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.801676 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.801725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.801736 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.801757 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.801771 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.817139 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.822063 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.822125 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.822146 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.822171 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.822192 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.835681 4756 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:32:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0611c06e-4fb1-42eb-9eae-fbc2363d05f7\\\",\\\"systemUUID\\\":\\\"9e1f3995-67e3-42b5-a320-7b79274c960e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:32:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:32:38 crc kubenswrapper[4756]: E0930 19:32:38.835988 4756 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.838099 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
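
Every retry above dies the same way: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, more than a month before the log's clock of 2025-09-30, so each status patch fails in the TLS handshake, and after the kubelet's fixed retry budget (nodeStatusUpdateRetry, 5 in current kubelet sources) it logs "update node status exceeds retry count" and gives up until the next sync. A minimal Go sketch of the same validity check, hedged: the endpoint comes from the log, and InsecureSkipVerify is used only so the handshake survives long enough to read the certificate that normal verification rejects.

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Endpoint taken from the failing webhook Post in the log above.
        // InsecureSkipVerify lets the handshake finish so the rejected
        // certificate can be inspected; the kubelet client rightly verifies.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        certs := conn.ConnectionState().PeerCertificates
        if len(certs) == 0 {
            fmt.Println("no peer certificate presented")
            return
        }
        cert := certs[0]
        fmt.Printf("subject=%v notBefore=%s notAfter=%s\n",
            cert.Subject,
            cert.NotBefore.Format(time.RFC3339),
            cert.NotAfter.Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            // Matches the kubelet error: current time is after NotAfter
            // (2025-08-24T17:21:41Z in the log).
            fmt.Println("certificate has expired")
        }
    }

Once the certificate chain is valid again (on a CRC cluster, typically after the cluster has run long enough for its own certificate rotation to complete), the same status patch should go through and this retry loop disappears from the log.
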
event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.838186 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.838214 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.838268 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.838291 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.941701 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.941753 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.941764 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.941783 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:38 crc kubenswrapper[4756]: I0930 19:32:38.941795 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:38Z","lastTransitionTime":"2025-09-30T19:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.046849 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.046903 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.046914 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.046943 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.046957 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.115966 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.115997 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.116033 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:39 crc kubenswrapper[4756]: E0930 19:32:39.116512 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:39 crc kubenswrapper[4756]: E0930 19:32:39.116548 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:39 crc kubenswrapper[4756]: E0930 19:32:39.116699 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.150648 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.150715 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.150730 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.150750 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.150764 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.254184 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.254254 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.254272 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.254299 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.254321 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.356809 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.356887 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.356908 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.356937 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.356958 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.460815 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.460908 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.460934 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.461004 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.461037 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.565591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.565753 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.565780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.565845 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.565871 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.668948 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.669020 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.669039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.669067 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.669092 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.773271 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.773361 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.773386 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.773458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.773481 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.876842 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.876936 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.876956 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.876989 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.877009 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.981569 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.981646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.981664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.981761 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:39 crc kubenswrapper[4756]: I0930 19:32:39.981787 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:39Z","lastTransitionTime":"2025-09-30T19:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.084776 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.084855 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.084873 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.084905 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.084925 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
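Note: every "Node became not ready" condition in this stretch carries the same root cause: the container runtime reports NetworkReady=false because nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet, so the kubelet keeps the node NotReady until the network provider (OVN-Kubernetes/Multus here) comes up. A rough Go sketch, not the kubelet's actual implementation, of what "no CNI configuration file" amounts to, assuming the .conf/.conflist/.json extensions that CNI config loaders conventionally accept:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	const confDir = "/etc/kubernetes/cni/net.d" // directory named in the log message
    	entries, err := os.ReadDir(confDir)
    	if err != nil {
    		fmt.Printf("cannot read %s: %v\n", confDir, err)
    		return
    	}
    	found := 0
    	for _, e := range entries {
    		if e.IsDir() {
    			continue
    		}
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json": // conventional CNI config extensions (assumption)
    			found++
    			fmt.Println("found CNI config:", e.Name())
    		}
    	}
    	if found == 0 {
    		// The state the kubelet keeps reporting above: no config yet,
    		// so NetworkReady stays false and pod sandboxes cannot start.
    		fmt.Println("no CNI configuration file: network plugin not ready")
    	}
    }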
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.115486 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:40 crc kubenswrapper[4756]: E0930 19:32:40.115688 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.189022 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.189108 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.189133 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.189208 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.189254 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.293290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.293354 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.293371 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.293426 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.293447 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.401812 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.401879 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.401906 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.401939 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.401964 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.505289 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.505371 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.505389 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.505458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.505480 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.608853 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.608910 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.608927 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.608956 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.608976 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.711763 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.711806 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.711819 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.711838 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.711849 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.816621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.816707 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.816731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.816759 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.816777 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.919804 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.919863 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.919881 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.919908 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:40 crc kubenswrapper[4756]: I0930 19:32:40.919925 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:40Z","lastTransitionTime":"2025-09-30T19:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.023326 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.023417 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.023432 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.023457 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.023475 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.115654 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.115761 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:41 crc kubenswrapper[4756]: E0930 19:32:41.115816 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.115857 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:41 crc kubenswrapper[4756]: E0930 19:32:41.115938 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:41 crc kubenswrapper[4756]: E0930 19:32:41.116181 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.126312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.126344 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.126355 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.126374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.126387 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.148985 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-ghp5t" podStartSLOduration=70.148966294 podStartE2EDuration="1m10.148966294s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.14804389 +0000 UTC m=+90.768977367" watchObservedRunningTime="2025-09-30 19:32:41.148966294 +0000 UTC m=+90.769899771" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.168233 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-m6zsp" podStartSLOduration=70.168211195 podStartE2EDuration="1m10.168211195s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.166293105 +0000 UTC m=+90.787226602" watchObservedRunningTime="2025-09-30 19:32:41.168211195 +0000 UTC m=+90.789144672" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.181723 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gld2h" podStartSLOduration=69.181692966 podStartE2EDuration="1m9.181692966s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.180790922 +0000 UTC m=+90.801724399" watchObservedRunningTime="2025-09-30 19:32:41.181692966 +0000 UTC m=+90.802626443" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.231501 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.231374747 podStartE2EDuration="1m9.231374747s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.22725161 +0000 UTC m=+90.848185087" 
watchObservedRunningTime="2025-09-30 19:32:41.231374747 +0000 UTC m=+90.852308224" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.234562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.234617 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.234630 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.234657 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.234672 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.255998 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.255974367 podStartE2EDuration="38.255974367s" podCreationTimestamp="2025-09-30 19:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.255454104 +0000 UTC m=+90.876387581" watchObservedRunningTime="2025-09-30 19:32:41.255974367 +0000 UTC m=+90.876907844" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.298989 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podStartSLOduration=70.298967195 podStartE2EDuration="1m10.298967195s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.270191057 +0000 UTC m=+90.891124534" watchObservedRunningTime="2025-09-30 19:32:41.298967195 +0000 UTC m=+90.919900672" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.323964 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-8nj5z" podStartSLOduration=70.323946015 podStartE2EDuration="1m10.323946015s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.309286964 +0000 UTC m=+90.930220441" watchObservedRunningTime="2025-09-30 19:32:41.323946015 +0000 UTC m=+90.944879492" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.332916 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=12.332889967 podStartE2EDuration="12.332889967s" podCreationTimestamp="2025-09-30 19:32:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.332529088 +0000 UTC m=+90.953462565" watchObservedRunningTime="2025-09-30 
19:32:41.332889967 +0000 UTC m=+90.953823434" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.337335 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.337425 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.337437 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.337453 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.337464 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.359791 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.359763816 podStartE2EDuration="1m8.359763816s" podCreationTimestamp="2025-09-30 19:31:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.35915111 +0000 UTC m=+90.980084617" watchObservedRunningTime="2025-09-30 19:32:41.359763816 +0000 UTC m=+90.980697303" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.412372 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.412346454 podStartE2EDuration="1m9.412346454s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.390802113 +0000 UTC m=+91.011735580" watchObservedRunningTime="2025-09-30 19:32:41.412346454 +0000 UTC m=+91.033279971" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.426727 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-l9mw5" podStartSLOduration=70.426702887 podStartE2EDuration="1m10.426702887s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:41.426239375 +0000 UTC m=+91.047172852" watchObservedRunningTime="2025-09-30 19:32:41.426702887 +0000 UTC m=+91.047636364" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.443550 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.443591 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.443603 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.443620 4756 kubelet_node_status.go:724] "Recording event 
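Note: the podStartSLOduration figures in the entries above are plain timestamp differences, observed running time minus podCreationTimestamp, with the image-pull window (firstStartedPulling/lastFinishedPulling, zero-valued here) excluded from the SLO figure, which is why podStartSLOduration and podStartE2EDuration coincide in every entry. A small Go sketch reproducing the arithmetic for openshift-dns/node-resolver-ghp5t from the timestamps logged above:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Layout matching the timestamp format in the log entries.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, _ := time.Parse(layout, "2025-09-30 19:31:31 +0000 UTC")
    	observed, _ := time.Parse(layout, "2025-09-30 19:32:41.148966294 +0000 UTC")
    	// With zero pull time, the SLO duration equals the end-to-end
    	// duration: matches podStartSLOduration=70.148966294 above.
    	fmt.Println(observed.Sub(created)) // 1m10.148966294s
    }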
message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.443632 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.546465 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.546495 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.546505 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.546519 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.546530 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.648989 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.649079 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.649099 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.649124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.649143 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.751574 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.751633 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.751646 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.751672 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.751686 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.857855 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.858023 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.858058 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.858112 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.858135 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.961337 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.961380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.961389 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.961432 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:41 crc kubenswrapper[4756]: I0930 19:32:41.961443 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:41Z","lastTransitionTime":"2025-09-30T19:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.064443 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.064512 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.064537 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.064568 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.064589 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.115694 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:42 crc kubenswrapper[4756]: E0930 19:32:42.115963 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.168110 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.168179 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.168200 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.168233 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.168254 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.271539 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.271610 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.271628 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.271658 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.271677 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.374959 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.375047 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.375065 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.375095 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.375116 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.478165 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.478221 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.478240 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.478270 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.478291 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.581963 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.582042 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.582066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.582099 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.582119 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.684188 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.684271 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.684290 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.684319 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.684340 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.790731 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.790780 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.790789 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.790807 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.790817 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.893552 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.893619 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.893629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.893647 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.893660 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.996124 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.996179 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.996194 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.996216 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:42 crc kubenswrapper[4756]: I0930 19:32:42.996231 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:42Z","lastTransitionTime":"2025-09-30T19:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.098754 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.098817 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.098838 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.098866 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.098886 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.115386 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.115472 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.115493 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:43 crc kubenswrapper[4756]: E0930 19:32:43.115676 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:43 crc kubenswrapper[4756]: E0930 19:32:43.115788 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:43 crc kubenswrapper[4756]: E0930 19:32:43.115996 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.201502 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.201548 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.201559 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.201577 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.201588 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.304425 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.304458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.304466 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.304481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.304491 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.407274 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.407317 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.407324 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.407340 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.407350 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.510738 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.510783 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.510795 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.510813 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.510826 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.614114 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.614162 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.614173 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.614192 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.614202 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.717380 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.717502 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.717528 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.717562 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.717587 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.821036 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.821082 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.821093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.821132 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.821144 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.925078 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.925156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.925174 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.925207 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:43 crc kubenswrapper[4756]: I0930 19:32:43.925228 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:43Z","lastTransitionTime":"2025-09-30T19:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.029281 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.029359 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.029376 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.029437 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.029460 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.115791 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:44 crc kubenswrapper[4756]: E0930 19:32:44.115982 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.132550 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.132596 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.132607 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.132625 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.132636 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.242991 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.243062 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.243081 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.243110 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.243129 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.346657 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.346714 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.346728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.346750 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.346765 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.450912 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.450970 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.451005 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.451037 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.451059 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.554983 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.555058 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.555096 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.555128 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.555150 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.658925 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.659027 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.659055 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.659095 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.659121 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.762906 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.762994 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.763034 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.763066 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.763087 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.867149 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.867284 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.867306 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.867338 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.867359 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.971019 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.971090 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.971111 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.971142 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:44 crc kubenswrapper[4756]: I0930 19:32:44.971163 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:44Z","lastTransitionTime":"2025-09-30T19:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.074830 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.074929 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.074954 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.074988 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.075008 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.115027 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.115052 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.115182 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:45 crc kubenswrapper[4756]: E0930 19:32:45.115389 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:45 crc kubenswrapper[4756]: E0930 19:32:45.115637 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:45 crc kubenswrapper[4756]: E0930 19:32:45.115756 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.177995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.178054 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.178071 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.178093 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.178119 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.281883 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.281965 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.281989 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.282024 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.282047 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.387896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.387969 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.387995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.388034 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.388067 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.492384 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.492446 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.492458 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.492481 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.492511 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.596373 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.596469 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.596503 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.596527 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.596544 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.699823 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.699898 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.699916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.699945 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.699966 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.802882 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.802945 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.802963 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.802992 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.803011 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.906312 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.906374 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.906383 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.906421 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:45 crc kubenswrapper[4756]: I0930 19:32:45.906432 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:45Z","lastTransitionTime":"2025-09-30T19:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.010487 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.010553 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.010566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.010587 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.010598 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.114120 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.114212 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.114244 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.114284 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.114309 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.115008 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:46 crc kubenswrapper[4756]: E0930 19:32:46.115215 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.217440 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.217497 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.217509 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.217529 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.217544 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.320543 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.320631 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.320650 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.320687 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.320707 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.424014 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.424065 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.424076 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.424094 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.424106 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.528086 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.528156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.528172 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.528196 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.528211 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.631912 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.631996 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.632016 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.632044 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.632064 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.735896 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.735970 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.735991 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.736023 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.736043 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.840166 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.840284 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.840311 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.840439 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.840461 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.944249 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.944303 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.944315 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.944335 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:46 crc kubenswrapper[4756]: I0930 19:32:46.944347 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:46Z","lastTransitionTime":"2025-09-30T19:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.047829 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.047881 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.047894 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.047916 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.047928 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.115791 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.115942 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:47 crc kubenswrapper[4756]: E0930 19:32:47.116001 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.116146 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:47 crc kubenswrapper[4756]: E0930 19:32:47.116234 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:47 crc kubenswrapper[4756]: E0930 19:32:47.116459 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.150511 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.150587 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.150609 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.150635 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.150658 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.253541 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.253629 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.253652 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.253685 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.253710 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.356449 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.356566 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.356585 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.356618 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.356638 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.460193 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.460241 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.460252 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.460270 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.460282 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.563005 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.563061 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.563071 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.563088 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.563098 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.666621 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.666690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.666709 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.666735 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.666757 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.770665 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.770725 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.770738 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.770761 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.770774 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.875608 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.875670 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.875686 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.875712 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.875738 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.979091 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.979156 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.979175 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.979200 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:47 crc kubenswrapper[4756]: I0930 19:32:47.979217 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:47Z","lastTransitionTime":"2025-09-30T19:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.082688 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.082769 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.082789 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.082822 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.082846 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.115788 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:48 crc kubenswrapper[4756]: E0930 19:32:48.116301 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.186382 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.186454 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.186465 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.186484 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.186498 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.290317 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.290388 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.290447 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.290474 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.290492 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.393291 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.393369 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.393389 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.393445 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.393469 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.496153 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.496215 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.496232 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.496257 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.496274 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.599690 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.599728 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.599736 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.599751 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.599760 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.702889 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.702953 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.702972 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.703000 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.703019 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.806509 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.806606 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.806626 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.806664 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.806690 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.908990 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.909039 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.909050 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.909072 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:48 crc kubenswrapper[4756]: I0930 19:32:48.909084 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:48Z","lastTransitionTime":"2025-09-30T19:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.009199 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.009247 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.009258 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.009273 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.009284 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:49Z","lastTransitionTime":"2025-09-30T19:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.035392 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.035475 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.035490 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.035511 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.035524 4756 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:32:49Z","lastTransitionTime":"2025-09-30T19:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.058712 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc"] Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.059316 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.062890 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.063046 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.063630 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.063963 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.115816 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.116641 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.116776 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.116867 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:32:49 crc kubenswrapper[4756]: E0930 19:32:49.116936 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:32:49 crc kubenswrapper[4756]: E0930 19:32:49.117098 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:32:49 crc kubenswrapper[4756]: E0930 19:32:49.117179 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" Sep 30 19:32:49 crc kubenswrapper[4756]: E0930 19:32:49.117292 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.153290 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.153359 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.153424 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7469341d-c77e-4fdc-be97-479356b794dc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.153750 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7469341d-c77e-4fdc-be97-479356b794dc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.153892 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7469341d-c77e-4fdc-be97-479356b794dc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.254940 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7469341d-c77e-4fdc-be97-479356b794dc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.255060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7469341d-c77e-4fdc-be97-479356b794dc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.255214 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" 
(UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.255289 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.255332 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7469341d-c77e-4fdc-be97-479356b794dc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.255504 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.257026 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7469341d-c77e-4fdc-be97-479356b794dc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.257385 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7469341d-c77e-4fdc-be97-479356b794dc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.267645 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7469341d-c77e-4fdc-be97-479356b794dc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.274095 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7469341d-c77e-4fdc-be97-479356b794dc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9mkqc\" (UID: \"7469341d-c77e-4fdc-be97-479356b794dc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.375116 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" Sep 30 19:32:49 crc kubenswrapper[4756]: W0930 19:32:49.405566 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7469341d_c77e_4fdc_be97_479356b794dc.slice/crio-7f1ad21961b0f261ffd144dec2e420d7c0c6843d642c5a2f8e76b4a25a662fd6 WatchSource:0}: Error finding container 7f1ad21961b0f261ffd144dec2e420d7c0c6843d642c5a2f8e76b4a25a662fd6: Status 404 returned error can't find the container with id 7f1ad21961b0f261ffd144dec2e420d7c0c6843d642c5a2f8e76b4a25a662fd6 Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.702333 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" event={"ID":"7469341d-c77e-4fdc-be97-479356b794dc","Type":"ContainerStarted","Data":"8a2dcc78e16faf16629039ed154dae20c25be5b41fe711975368bfa0d6990b97"} Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.702473 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" event={"ID":"7469341d-c77e-4fdc-be97-479356b794dc","Type":"ContainerStarted","Data":"7f1ad21961b0f261ffd144dec2e420d7c0c6843d642c5a2f8e76b4a25a662fd6"} Sep 30 19:32:49 crc kubenswrapper[4756]: I0930 19:32:49.718660 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9mkqc" podStartSLOduration=78.718633292 podStartE2EDuration="1m18.718633292s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:32:49.718334535 +0000 UTC m=+99.339268032" watchObservedRunningTime="2025-09-30 19:32:49.718633292 +0000 UTC m=+99.339566779" Sep 30 19:32:50 crc kubenswrapper[4756]: I0930 19:32:50.116015 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:50 crc kubenswrapper[4756]: E0930 19:32:50.116654 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:32:50 crc kubenswrapper[4756]: I0930 19:32:50.166330 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:32:50 crc kubenswrapper[4756]: E0930 19:32:50.166883 4756 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:32:50 crc kubenswrapper[4756]: E0930 19:32:50.167075 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:33:54.167055353 +0000 UTC m=+163.787988840 (durationBeforeRetry 1m4s). 
Sep 30 19:32:50 crc kubenswrapper[4756]: E0930 19:32:50.167075 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs podName:6c151bbd-9757-44ba-aa11-ff679e841fdc nodeName:}" failed. No retries permitted until 2025-09-30 19:33:54.167055353 +0000 UTC m=+163.787988840 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs") pod "network-metrics-daemon-b4n45" (UID: "6c151bbd-9757-44ba-aa11-ff679e841fdc") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:32:51 crc kubenswrapper[4756]: I0930 19:32:51.114951 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:51 crc kubenswrapper[4756]: I0930 19:32:51.115688 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:51 crc kubenswrapper[4756]: E0930 19:32:51.117208 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:51 crc kubenswrapper[4756]: I0930 19:32:51.117274 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:51 crc kubenswrapper[4756]: E0930 19:32:51.117551 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:51 crc kubenswrapper[4756]: E0930 19:32:51.117703 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:52 crc kubenswrapper[4756]: I0930 19:32:52.115753 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:52 crc kubenswrapper[4756]: E0930 19:32:52.115979 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:53 crc kubenswrapper[4756]: I0930 19:32:53.115043 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:53 crc kubenswrapper[4756]: I0930 19:32:53.115168 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:53 crc kubenswrapper[4756]: I0930 19:32:53.115268 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:53 crc kubenswrapper[4756]: E0930 19:32:53.115193 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:53 crc kubenswrapper[4756]: E0930 19:32:53.115443 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:53 crc kubenswrapper[4756]: E0930 19:32:53.115549 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:54 crc kubenswrapper[4756]: I0930 19:32:54.115076 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:54 crc kubenswrapper[4756]: E0930 19:32:54.115294 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:55 crc kubenswrapper[4756]: I0930 19:32:55.115996 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:55 crc kubenswrapper[4756]: I0930 19:32:55.116115 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:55 crc kubenswrapper[4756]: E0930 19:32:55.116234 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:55 crc kubenswrapper[4756]: I0930 19:32:55.116115 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:55 crc kubenswrapper[4756]: E0930 19:32:55.116334 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:55 crc kubenswrapper[4756]: E0930 19:32:55.116543 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:56 crc kubenswrapper[4756]: I0930 19:32:56.115466 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:56 crc kubenswrapper[4756]: E0930 19:32:56.115626 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:57 crc kubenswrapper[4756]: I0930 19:32:57.115517 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:57 crc kubenswrapper[4756]: I0930 19:32:57.115614 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:57 crc kubenswrapper[4756]: I0930 19:32:57.115551 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:57 crc kubenswrapper[4756]: E0930 19:32:57.115743 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:57 crc kubenswrapper[4756]: E0930 19:32:57.115863 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:57 crc kubenswrapper[4756]: E0930 19:32:57.116097 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:32:58 crc kubenswrapper[4756]: I0930 19:32:58.115678 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:32:58 crc kubenswrapper[4756]: E0930 19:32:58.115922 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:32:59 crc kubenswrapper[4756]: I0930 19:32:59.115219 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:32:59 crc kubenswrapper[4756]: E0930 19:32:59.115375 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:32:59 crc kubenswrapper[4756]: I0930 19:32:59.115687 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:32:59 crc kubenswrapper[4756]: E0930 19:32:59.115776 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:32:59 crc kubenswrapper[4756]: I0930 19:32:59.116031 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:32:59 crc kubenswrapper[4756]: E0930 19:32:59.116113 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:33:00 crc kubenswrapper[4756]: I0930 19:33:00.115601 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45"
Sep 30 19:33:00 crc kubenswrapper[4756]: E0930 19:33:00.115862 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc"
Sep 30 19:33:01 crc kubenswrapper[4756]: I0930 19:33:01.115229 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:33:01 crc kubenswrapper[4756]: I0930 19:33:01.115302 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:33:01 crc kubenswrapper[4756]: E0930 19:33:01.117656 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:33:01 crc kubenswrapper[4756]: I0930 19:33:01.117724 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:33:01 crc kubenswrapper[4756]: E0930 19:33:01.117883 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:33:01 crc kubenswrapper[4756]: E0930 19:33:01.117982 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:33:01 crc kubenswrapper[4756]: I0930 19:33:01.119425 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"
Sep 30 19:33:01 crc kubenswrapper[4756]: E0930 19:33:01.119695 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kmxk8_openshift-ovn-kubernetes(d96acc31-a519-46fc-94d8-f19522e77391)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391"
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:02 crc kubenswrapper[4756]: E0930 19:33:02.115989 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:03 crc kubenswrapper[4756]: I0930 19:33:03.115923 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:03 crc kubenswrapper[4756]: I0930 19:33:03.115950 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:03 crc kubenswrapper[4756]: E0930 19:33:03.116193 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:03 crc kubenswrapper[4756]: I0930 19:33:03.115992 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:03 crc kubenswrapper[4756]: E0930 19:33:03.116344 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:03 crc kubenswrapper[4756]: E0930 19:33:03.116452 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:04 crc kubenswrapper[4756]: I0930 19:33:04.115371 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:04 crc kubenswrapper[4756]: E0930 19:33:04.115526 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:05 crc kubenswrapper[4756]: I0930 19:33:05.115891 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:05 crc kubenswrapper[4756]: I0930 19:33:05.116034 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:05 crc kubenswrapper[4756]: E0930 19:33:05.116096 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:05 crc kubenswrapper[4756]: I0930 19:33:05.116199 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:05 crc kubenswrapper[4756]: E0930 19:33:05.117163 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:05 crc kubenswrapper[4756]: E0930 19:33:05.117443 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.115584 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:06 crc kubenswrapper[4756]: E0930 19:33:06.116161 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.771200 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/1.log" Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.772024 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/0.log" Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.772117 4756 generic.go:334] "Generic (PLEG): container finished" podID="104562fb-2a2d-4291-963c-7a95062ec13a" containerID="3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996" exitCode=1 Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.772177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerDied","Data":"3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996"} Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.772250 4756 scope.go:117] "RemoveContainer" containerID="c012009175f2ca25af13346dce481a4640354a2b7c771ce9a8d92f0a6a014f23" Sep 30 19:33:06 crc kubenswrapper[4756]: I0930 19:33:06.772907 4756 scope.go:117] "RemoveContainer" containerID="3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996" Sep 30 19:33:06 crc kubenswrapper[4756]: E0930 19:33:06.773190 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-l9mw5_openshift-multus(104562fb-2a2d-4291-963c-7a95062ec13a)\"" pod="openshift-multus/multus-l9mw5" podUID="104562fb-2a2d-4291-963c-7a95062ec13a" Sep 30 19:33:07 crc kubenswrapper[4756]: I0930 19:33:07.115894 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:07 crc kubenswrapper[4756]: E0930 19:33:07.116045 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:07 crc kubenswrapper[4756]: I0930 19:33:07.115894 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:07 crc kubenswrapper[4756]: I0930 19:33:07.116134 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:07 crc kubenswrapper[4756]: E0930 19:33:07.116372 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:07 crc kubenswrapper[4756]: E0930 19:33:07.116500 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:07 crc kubenswrapper[4756]: I0930 19:33:07.777632 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/1.log" Sep 30 19:33:08 crc kubenswrapper[4756]: I0930 19:33:08.115672 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:08 crc kubenswrapper[4756]: E0930 19:33:08.115867 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:09 crc kubenswrapper[4756]: I0930 19:33:09.115476 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:09 crc kubenswrapper[4756]: I0930 19:33:09.115604 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:09 crc kubenswrapper[4756]: I0930 19:33:09.115492 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:09 crc kubenswrapper[4756]: E0930 19:33:09.115723 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:09 crc kubenswrapper[4756]: E0930 19:33:09.115988 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:09 crc kubenswrapper[4756]: E0930 19:33:09.116109 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:10 crc kubenswrapper[4756]: I0930 19:33:10.116029 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:10 crc kubenswrapper[4756]: E0930 19:33:10.116311 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:11 crc kubenswrapper[4756]: I0930 19:33:11.115664 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:11 crc kubenswrapper[4756]: E0930 19:33:11.116116 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:11 crc kubenswrapper[4756]: I0930 19:33:11.116340 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:11 crc kubenswrapper[4756]: E0930 19:33:11.116476 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:11 crc kubenswrapper[4756]: I0930 19:33:11.116354 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:11 crc kubenswrapper[4756]: E0930 19:33:11.116584 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:11 crc kubenswrapper[4756]: E0930 19:33:11.128679 4756 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 19:33:11 crc kubenswrapper[4756]: E0930 19:33:11.233151 4756 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:33:12 crc kubenswrapper[4756]: I0930 19:33:12.115062 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:12 crc kubenswrapper[4756]: E0930 19:33:12.115710 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:13 crc kubenswrapper[4756]: I0930 19:33:13.115115 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:13 crc kubenswrapper[4756]: E0930 19:33:13.115781 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:13 crc kubenswrapper[4756]: I0930 19:33:13.115439 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:13 crc kubenswrapper[4756]: E0930 19:33:13.116020 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:13 crc kubenswrapper[4756]: I0930 19:33:13.115139 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:13 crc kubenswrapper[4756]: E0930 19:33:13.116217 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:14 crc kubenswrapper[4756]: I0930 19:33:14.115581 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:14 crc kubenswrapper[4756]: E0930 19:33:14.115824 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:15 crc kubenswrapper[4756]: I0930 19:33:15.115630 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:15 crc kubenswrapper[4756]: E0930 19:33:15.116245 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:15 crc kubenswrapper[4756]: I0930 19:33:15.115793 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:15 crc kubenswrapper[4756]: E0930 19:33:15.116966 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:15 crc kubenswrapper[4756]: I0930 19:33:15.115732 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:15 crc kubenswrapper[4756]: E0930 19:33:15.117232 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.115115 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:16 crc kubenswrapper[4756]: E0930 19:33:16.115620 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.115911 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:33:16 crc kubenswrapper[4756]: E0930 19:33:16.234527 4756 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.810710 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log" Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.813980 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerStarted","Data":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.814411 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:33:16 crc kubenswrapper[4756]: I0930 19:33:16.844926 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podStartSLOduration=104.844897416 podStartE2EDuration="1m44.844897416s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:16.844728592 +0000 UTC m=+126.465662079" watchObservedRunningTime="2025-09-30 19:33:16.844897416 +0000 UTC m=+126.465830893" Sep 30 19:33:17 crc kubenswrapper[4756]: I0930 19:33:17.037128 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-b4n45"] Sep 30 19:33:17 crc kubenswrapper[4756]: I0930 19:33:17.037247 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:17 crc kubenswrapper[4756]: E0930 19:33:17.037467 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:17 crc kubenswrapper[4756]: I0930 19:33:17.115310 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:17 crc kubenswrapper[4756]: I0930 19:33:17.115322 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:17 crc kubenswrapper[4756]: E0930 19:33:17.115454 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:17 crc kubenswrapper[4756]: I0930 19:33:17.115518 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:17 crc kubenswrapper[4756]: E0930 19:33:17.115623 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:17 crc kubenswrapper[4756]: E0930 19:33:17.115652 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:18 crc kubenswrapper[4756]: I0930 19:33:18.115511 4756 scope.go:117] "RemoveContainer" containerID="3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996" Sep 30 19:33:18 crc kubenswrapper[4756]: I0930 19:33:18.827778 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/1.log" Sep 30 19:33:18 crc kubenswrapper[4756]: I0930 19:33:18.828217 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerStarted","Data":"470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9"} Sep 30 19:33:19 crc kubenswrapper[4756]: I0930 19:33:19.116389 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:19 crc kubenswrapper[4756]: I0930 19:33:19.116494 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:19 crc kubenswrapper[4756]: I0930 19:33:19.116443 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:19 crc kubenswrapper[4756]: E0930 19:33:19.116683 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:19 crc kubenswrapper[4756]: I0930 19:33:19.116726 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:19 crc kubenswrapper[4756]: E0930 19:33:19.116891 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:19 crc kubenswrapper[4756]: E0930 19:33:19.116970 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:19 crc kubenswrapper[4756]: E0930 19:33:19.117032 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:21 crc kubenswrapper[4756]: I0930 19:33:21.115796 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:21 crc kubenswrapper[4756]: I0930 19:33:21.115877 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:21 crc kubenswrapper[4756]: E0930 19:33:21.115957 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:33:21 crc kubenswrapper[4756]: I0930 19:33:21.115796 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:21 crc kubenswrapper[4756]: E0930 19:33:21.116036 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b4n45" podUID="6c151bbd-9757-44ba-aa11-ff679e841fdc" Sep 30 19:33:21 crc kubenswrapper[4756]: I0930 19:33:21.115880 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:21 crc kubenswrapper[4756]: E0930 19:33:21.116102 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:33:21 crc kubenswrapper[4756]: E0930 19:33:21.116360 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:33:22 crc kubenswrapper[4756]: I0930 19:33:22.134864 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.116843 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.116968 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.116861 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.116885 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.120184 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.120184 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.120512 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.121119 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.121152 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 30 19:33:23 crc kubenswrapper[4756]: I0930 19:33:23.121261 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.794995 4756 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.826094 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.826645 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.826696 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zlg2j"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.829023 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.830173 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.830593 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ln592"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.830618 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.830919 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831001 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831026 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831249 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831375 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831661 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831590 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.831825 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.832307 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.833328 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w7nzp"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.833695 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834153 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834632 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834664 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-client\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834688 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834709 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-encryption-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834726 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834746 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f640044-1129-4119-a819-bc37a9aaf617-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834769 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834789 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: 
\"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834814 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834840 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-serving-cert\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834874 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834891 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-audit-dir\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834905 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-encryption-config\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834932 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-policies\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834950 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jx2p\" (UniqueName: \"kubernetes.io/projected/07175e3d-c776-4d48-af22-d30a7e9281ef-kube-api-access-6jx2p\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.834969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-config\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" 
Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835021 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-etcd-client\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835029 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835057 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-dir\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835072 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835088 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgkr5\" (UniqueName: \"kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835107 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-node-pullsecrets\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835123 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-image-import-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835140 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8lbj\" (UniqueName: \"kubernetes.io/projected/5921345a-df3c-4611-b471-52f946af0c5f-kube-api-access-x8lbj\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835167 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835183 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835198 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6xh5\" (UniqueName: \"kubernetes.io/projected/1f640044-1129-4119-a819-bc37a9aaf617-kube-api-access-c6xh5\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835314 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-audit\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835350 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835412 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835448 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835472 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-serving-cert\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835500 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835528 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835554 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835583 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-images\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835613 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835638 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835656 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.835725 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.838858 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.839592 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.839851 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.839962 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.840100 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.840206 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.840304 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.840339 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75ssh"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.840792 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.854624 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.854996 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.855180 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856177 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856288 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856334 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856604 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-glgxx"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856630 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.860813 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.861226 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" 
Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.861411 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.861596 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.862377 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.867736 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.868066 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.868242 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.868434 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.868622 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.868743 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.856175 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.871869 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.872072 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.872190 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.883906 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.884516 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.883952 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.884757 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.884479 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.884055 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.885366 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.885642 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.885958 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.887364 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tvhtj"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.887905 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.888019 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.889308 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.889339 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.889903 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.894640 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.895010 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.895603 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.900580 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.910261 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.910667 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.911946 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.912675 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7dwjw"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.912741 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.913138 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.917471 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.917923 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.918316 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.918775 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.923454 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.923990 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ln592"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.924100 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.926312 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zlg2j"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.927696 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.928633 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.931617 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.932061 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75ssh"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.932082 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tvhtj"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.932164 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.933518 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.934176 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.934251 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.934747 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.935095 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936759 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936798 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936826 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-images\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936852 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936880 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936920 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936951 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-client\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936972 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.936998 4756 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-encryption-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937023 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937046 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f640044-1129-4119-a819-bc37a9aaf617-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937071 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937093 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937118 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937143 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-serving-cert\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937175 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937217 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-audit-dir\") pod \"apiserver-76f77b778f-zlg2j\" (UID: 
\"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937243 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-encryption-config\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937266 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-policies\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937287 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jx2p\" (UniqueName: \"kubernetes.io/projected/07175e3d-c776-4d48-af22-d30a7e9281ef-kube-api-access-6jx2p\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937311 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-config\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-etcd-client\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937372 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-dir\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937739 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.937798 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.959371 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-config\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: 
\"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.959845 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.959912 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgkr5\" (UniqueName: \"kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-node-pullsecrets\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960158 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-image-import-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960364 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8lbj\" (UniqueName: \"kubernetes.io/projected/5921345a-df3c-4611-b471-52f946af0c5f-kube-api-access-x8lbj\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960320 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960617 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960654 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960826 4756 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-c6xh5\" (UniqueName: \"kubernetes.io/projected/1f640044-1129-4119-a819-bc37a9aaf617-kube-api-access-c6xh5\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.960859 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-audit\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961042 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961075 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961180 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961273 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-serving-cert\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961313 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.961765 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.962828 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-node-pullsecrets\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.965014 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" 
(UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-image-import-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.965026 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.966545 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.976458 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-etcd-client\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.976764 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-dir\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.977822 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-encryption-config\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.978340 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.980116 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.980854 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1f640044-1129-4119-a819-bc37a9aaf617-images\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.980921 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: 
\"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.985764 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.986485 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-audit-policies\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.986541 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.987060 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.987586 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2"] Sep 30 19:33:29 crc kubenswrapper[4756]: I0930 19:33:29.997238 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-audit\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.007505 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.008370 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5921345a-df3c-4611-b471-52f946af0c5f-audit-dir\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009076 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009156 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 
19:33:30.009118 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f640044-1129-4119-a819-bc37a9aaf617-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009372 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009427 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009649 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.009787 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.010939 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.013916 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-etcd-client\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.014231 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.014547 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.014730 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.014917 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.014982 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.015124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/5921345a-df3c-4611-b471-52f946af0c5f-serving-cert\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.015263 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.015322 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.015367 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.015363 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.017782 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.018294 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.018489 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.018696 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfgwx"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019023 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019141 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019219 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019219 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019438 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019673 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019705 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.019857 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.020326 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cgk7c"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.020768 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.020964 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.021339 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.021543 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.021987 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.023007 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-encryption-config\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.026139 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.026931 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.027201 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.030610 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.030870 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.031119 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.031501 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.031573 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.032012 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.032345 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.032537 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.032688 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.032711 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.033669 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034225 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5921345a-df3c-4611-b471-52f946af0c5f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034464 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034515 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034749 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034755 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.034785 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.035211 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.036040 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.036255 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 
30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.036428 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.036575 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.037806 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.037955 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.038422 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.038631 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039071 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039477 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039739 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039842 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.038492 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.040098 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.040234 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039781 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.040489 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.040522 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.039821 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.040434 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.041193 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.042585 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wdtth"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.043135 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.043969 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.045521 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.048575 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.048948 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.049257 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.065905 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.067022 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07175e3d-c776-4d48-af22-d30a7e9281ef-serving-cert\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.067566 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.068339 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.068521 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.068709 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.072998 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.073078 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.074772 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.076565 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.079428 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.080240 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.081907 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.084108 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7dwjw"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.085501 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.086571 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-88vsq"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.087128 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.088176 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.090133 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.090791 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.091357 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.093148 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.093368 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.093545 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-jjwh9"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.094336 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.095086 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-glgxx"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.096201 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w7nzp"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.097703 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.109645 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-frglv"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.110316 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.110309 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.114515 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.121001 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.122813 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.124047 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.125649 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.127180 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.128053 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.128522 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.128999 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jjwh9"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.130258 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.131022 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.132204 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.133056 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cgk7c"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.134131 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wdtth"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.135087 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.136477 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.137155 4756 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.138678 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfgwx"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.140357 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.141420 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.142342 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.143503 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.144669 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.145586 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.146717 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zh59x"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.148015 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9mtxr"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.148161 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.148894 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zh59x"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.148946 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.149303 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.150152 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9mtxr"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.169040 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.189714 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.209195 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.228689 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.269529 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.289208 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.309027 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.329775 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.348945 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.369068 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.405481 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.424293 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jx2p\" (UniqueName: \"kubernetes.io/projected/07175e3d-c776-4d48-af22-d30a7e9281ef-kube-api-access-6jx2p\") pod \"apiserver-7bbb656c7d-dwlkd\" (UID: \"07175e3d-c776-4d48-af22-d30a7e9281ef\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.449533 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.453514 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgkr5\" (UniqueName: \"kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5\") pod \"oauth-openshift-558db77b4-h5tp5\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.471892 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8lbj\" (UniqueName: \"kubernetes.io/projected/5921345a-df3c-4611-b471-52f946af0c5f-kube-api-access-x8lbj\") pod \"apiserver-76f77b778f-zlg2j\" (UID: \"5921345a-df3c-4611-b471-52f946af0c5f\") " pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.489343 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.489359 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6xh5\" (UniqueName: \"kubernetes.io/projected/1f640044-1129-4119-a819-bc37a9aaf617-kube-api-access-c6xh5\") pod \"machine-api-operator-5694c8668f-ln592\" (UID: \"1f640044-1129-4119-a819-bc37a9aaf617\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.496458 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.510235 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.529169 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.549651 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.569343 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.608837 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.630136 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.646209 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.650043 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.672771 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: 
I0930 19:33:30.689986 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.708602 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.729263 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.748911 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.756256 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.769244 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.785299 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.789409 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.816225 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.829333 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.850844 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.872131 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.889029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" event={"ID":"07175e3d-c776-4d48-af22-d30a7e9281ef","Type":"ContainerStarted","Data":"4b90f9f5e3e98f91734d7bde0d648f565c091e5f000fca4098fc4af797b9a029"} Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.889077 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" event={"ID":"07175e3d-c776-4d48-af22-d30a7e9281ef","Type":"ContainerStarted","Data":"93547bd6b370feff3cb55192d8d5b15836b77d088e222a6fd8e85764ee524f81"} Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.889048 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.905458 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.905511 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zlg2j"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.909642 4756 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.929468 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 19:33:30 crc kubenswrapper[4756]: W0930 19:33:30.938949 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29fa1c29_d959_4928_bff4_550a2928dfef.slice/crio-be742d4b7690edccc58f7d4add6e79b98ec7447d28566b1f7eadd822e58714c9 WatchSource:0}: Error finding container be742d4b7690edccc58f7d4add6e79b98ec7447d28566b1f7eadd822e58714c9: Status 404 returned error can't find the container with id be742d4b7690edccc58f7d4add6e79b98ec7447d28566b1f7eadd822e58714c9 Sep 30 19:33:30 crc kubenswrapper[4756]: W0930 19:33:30.941883 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5921345a_df3c_4611_b471_52f946af0c5f.slice/crio-3c33148cd0e4009e3770bca6f9e533d2cc16ff9fe4042bf539101fbb5796fe76 WatchSource:0}: Error finding container 3c33148cd0e4009e3770bca6f9e533d2cc16ff9fe4042bf539101fbb5796fe76: Status 404 returned error can't find the container with id 3c33148cd0e4009e3770bca6f9e533d2cc16ff9fe4042bf539101fbb5796fe76 Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.948787 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.963624 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ln592"] Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.968819 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 19:33:30 crc kubenswrapper[4756]: I0930 19:33:30.990363 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.009186 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.028667 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.048135 4756 request.go:700] Waited for 1.016200622s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dolm-operator-serviceaccount-dockercfg-rq7zk&limit=500&resourceVersion=0 Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.049609 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.068782 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.089047 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.110411 4756 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.129166 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.149043 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.177048 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.188566 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.217169 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.228841 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.249311 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.270020 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.289751 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.309165 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.329631 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.349688 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.368659 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.388939 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.408885 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.430747 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.449707 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.469933 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.490003 4756 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.509040 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.529193 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.549704 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.570692 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.589561 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.609791 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.629385 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.650764 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.669030 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.688518 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.709686 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.730960 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.751119 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.770019 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.790058 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.810148 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.830539 4756 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.849703 4756 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.869207 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.889198 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.895807 4756 generic.go:334] "Generic (PLEG): container finished" podID="07175e3d-c776-4d48-af22-d30a7e9281ef" containerID="4b90f9f5e3e98f91734d7bde0d648f565c091e5f000fca4098fc4af797b9a029" exitCode=0 Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.895913 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" event={"ID":"07175e3d-c776-4d48-af22-d30a7e9281ef","Type":"ContainerDied","Data":"4b90f9f5e3e98f91734d7bde0d648f565c091e5f000fca4098fc4af797b9a029"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.897528 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" event={"ID":"29fa1c29-d959-4928-bff4-550a2928dfef","Type":"ContainerStarted","Data":"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.897581 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" event={"ID":"29fa1c29-d959-4928-bff4-550a2928dfef","Type":"ContainerStarted","Data":"be742d4b7690edccc58f7d4add6e79b98ec7447d28566b1f7eadd822e58714c9"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.897965 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.899561 4756 generic.go:334] "Generic (PLEG): container finished" podID="5921345a-df3c-4611-b471-52f946af0c5f" containerID="5565b3d95902b4fad45af52b426253d1f8e5a662a199b0dc93a510c45af495fc" exitCode=0 Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.899605 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" event={"ID":"5921345a-df3c-4611-b471-52f946af0c5f","Type":"ContainerDied","Data":"5565b3d95902b4fad45af52b426253d1f8e5a662a199b0dc93a510c45af495fc"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.899622 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" event={"ID":"5921345a-df3c-4611-b471-52f946af0c5f","Type":"ContainerStarted","Data":"3c33148cd0e4009e3770bca6f9e533d2cc16ff9fe4042bf539101fbb5796fe76"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.902243 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" event={"ID":"1f640044-1129-4119-a819-bc37a9aaf617","Type":"ContainerStarted","Data":"def8a6b03daf4371f7fa6bee29cb90bcef7b0a5e3a898175ceb50bdc5474c0fc"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.902314 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" event={"ID":"1f640044-1129-4119-a819-bc37a9aaf617","Type":"ContainerStarted","Data":"b9b768cf0d582ab6196925bc4a15f0941d43fdc1f9294089a44459bfd125067f"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.902329 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" event={"ID":"1f640044-1129-4119-a819-bc37a9aaf617","Type":"ContainerStarted","Data":"c66ee477e9e204f91cef4906859c376e67dfc99ed2dc1b48ea09d33ee3158e71"} Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.910159 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.932254 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.987904 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.988455 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81849a73-fe68-44ea-b5c6-24614310b67a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.988537 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d116b7ef-bc7d-4f25-8aec-dabad585681a-serving-cert\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.988586 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.988614 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-images\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.988705 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989239 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0f26253-f454-4de3-b058-bbbece905a7e-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989265 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0f26253-f454-4de3-b058-bbbece905a7e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989288 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-service-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989313 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgbqz\" (UniqueName: \"kubernetes.io/projected/52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad-kube-api-access-vgbqz\") pod \"downloads-7954f5f757-7dwjw\" (UID: \"52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad\") " pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989433 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81849a73-fe68-44ea-b5c6-24614310b67a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989539 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989560 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbmmx\" (UniqueName: \"kubernetes.io/projected/4d420445-45ce-42fc-9571-f137dddc8a2e-kube-api-access-kbmmx\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989579 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ae14a06-9c17-4937-b761-13a2247365cd-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989607 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-config\") pod 
\"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989640 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpv4j\" (UniqueName: \"kubernetes.io/projected/a7c32461-b400-43b7-829b-4aecbfb73d70-kube-api-access-fpv4j\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989668 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989690 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxl8h\" (UniqueName: \"kubernetes.io/projected/d116b7ef-bc7d-4f25-8aec-dabad585681a-kube-api-access-wxl8h\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989723 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989740 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8llp\" (UniqueName: \"kubernetes.io/projected/f47ebe12-09aa-476d-a609-f9066a106afb-kube-api-access-r8llp\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989792 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-service-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989810 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mnz4\" (UniqueName: \"kubernetes.io/projected/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-kube-api-access-2mnz4\") pod \"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989830 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnbx9\" (UniqueName: 
\"kubernetes.io/projected/81849a73-fe68-44ea-b5c6-24614310b67a-kube-api-access-wnbx9\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989854 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989872 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.989889 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990353 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgzt6\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-kube-api-access-cgzt6\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990498 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4110da68-3e86-428e-983e-e01ee898cf2c-proxy-tls\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990543 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8h85\" (UniqueName: \"kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990630 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990662 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:31 crc kubenswrapper[4756]: E0930 19:33:31.990736 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.490706746 +0000 UTC m=+142.111640433 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990792 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990852 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990876 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d420445-45ce-42fc-9571-f137dddc8a2e-serving-cert\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990893 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990938 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6snr\" (UniqueName: \"kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.990974 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff466\" (UniqueName: \"kubernetes.io/projected/4110da68-3e86-428e-983e-e01ee898cf2c-kube-api-access-ff466\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: 
\"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991002 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-serving-cert\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991035 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991053 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f47ebe12-09aa-476d-a609-f9066a106afb-machine-approver-tls\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991131 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991207 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gztll\" (UniqueName: \"kubernetes.io/projected/3ae14a06-9c17-4937-b761-13a2247365cd-kube-api-access-gztll\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991428 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2jvk\" (UniqueName: \"kubernetes.io/projected/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-kube-api-access-j2jvk\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991456 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ae14a06-9c17-4937-b761-13a2247365cd-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991591 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-trusted-ca\") pod 
\"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991615 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991639 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991673 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991709 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991822 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4qxs\" (UniqueName: \"kubernetes.io/projected/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-kube-api-access-n4qxs\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991959 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.991987 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7c32461-b400-43b7-829b-4aecbfb73d70-metrics-tls\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992006 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-config\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992024 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzt7m\" (UniqueName: \"kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992045 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-auth-proxy-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992081 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992179 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf9s9\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992199 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992213 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992235 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992268 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992294 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992329 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-config\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992347 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:31 crc kubenswrapper[4756]: I0930 19:33:31.992367 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-client\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.032246 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.092971 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.094734 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.098141 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.594793434 +0000 UTC m=+142.215726911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098309 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098452 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098519 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d420445-45ce-42fc-9571-f137dddc8a2e-serving-cert\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098567 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-srv-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098630 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f2279937-aa0a-4874-bdd0-b3b1a685ee64-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098686 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098734 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dd216c3-656c-4c3d-909e-1eabf16f0473-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: 
I0930 19:33:32.098774 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-default-certificate\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098829 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-serving-cert\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098880 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098937 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/86d80f6f-9644-43e7-be90-6e94eaf7d71f-metrics-tls\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.098993 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/306f83f8-10af-4389-9546-62325fe60d3a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099057 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099117 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn98q\" (UniqueName: \"kubernetes.io/projected/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-kube-api-access-jn98q\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099182 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-node-bootstrap-token\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099242 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2jvk\" 
(UniqueName: \"kubernetes.io/projected/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-kube-api-access-j2jvk\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099283 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ae14a06-9c17-4937-b761-13a2247365cd-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099423 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-profile-collector-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099483 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099522 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099570 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl248\" (UniqueName: \"kubernetes.io/projected/baa40fd8-6584-4742-992d-ccc029178c3e-kube-api-access-cl248\") pod \"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099624 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-auth-proxy-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099672 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099753 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsf4f\" (UniqueName: \"kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099810 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2c7827d-51b1-4d41-a16e-c29e249be294-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099864 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099903 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd216c3-656c-4c3d-909e-1eabf16f0473-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.099999 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86d80f6f-9644-43e7-be90-6e94eaf7d71f-config-volume\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100065 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6jjg\" (UniqueName: \"kubernetes.io/projected/c63224ae-e78e-484b-83e1-2b6432cc1de6-kube-api-access-b6jjg\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100136 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltqrg\" (UniqueName: 
\"kubernetes.io/projected/bd2faf96-f859-4c10-871a-6b9ed6f4571b-kube-api-access-ltqrg\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100176 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxmrs\" (UniqueName: \"kubernetes.io/projected/8f68dfd5-c9df-45b1-8987-8ec4053731ff-kube-api-access-xxmrs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100231 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100303 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q7j2\" (UniqueName: \"kubernetes.io/projected/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-kube-api-access-8q7j2\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100364 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100450 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100521 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-images\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100571 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100611 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0f26253-f454-4de3-b058-bbbece905a7e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: 
\"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100666 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0f26253-f454-4de3-b058-bbbece905a7e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100713 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-config\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100803 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbmmx\" (UniqueName: \"kubernetes.io/projected/4d420445-45ce-42fc-9571-f137dddc8a2e-kube-api-access-kbmmx\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100850 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-config\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100849 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.100898 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-webhook-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.101040 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpv4j\" (UniqueName: \"kubernetes.io/projected/a7c32461-b400-43b7-829b-4aecbfb73d70-kube-api-access-fpv4j\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.102091 
4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.102662 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.103756 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.103829 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-images\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.104482 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.104511 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0f26253-f454-4de3-b058-bbbece905a7e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.104538 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105273 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-config\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105503 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ae14a06-9c17-4937-b761-13a2247365cd-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105607 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105654 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxl8h\" (UniqueName: \"kubernetes.io/projected/d116b7ef-bc7d-4f25-8aec-dabad585681a-kube-api-access-wxl8h\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105722 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.105749 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8llp\" (UniqueName: \"kubernetes.io/projected/f47ebe12-09aa-476d-a609-f9066a106afb-kube-api-access-r8llp\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.106493 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.106873 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.107074 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.107658 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d5e87d60-31f6-441c-9966-573390631165-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 
19:33:32.107686 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f47ebe12-09aa-476d-a609-f9066a106afb-auth-proxy-config\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.107826 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-plugins-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.108176 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mnz4\" (UniqueName: \"kubernetes.io/projected/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-kube-api-access-2mnz4\") pod \"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.108335 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnbx9\" (UniqueName: \"kubernetes.io/projected/81849a73-fe68-44ea-b5c6-24614310b67a-kube-api-access-wnbx9\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.108482 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.108510 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.108563 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109037 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109108 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8h85\" (UniqueName: 
\"kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109171 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109730 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-tmpfs\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109509 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-serving-cert\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109797 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.109885 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110026 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d420445-45ce-42fc-9571-f137dddc8a2e-serving-cert\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110306 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd216c3-656c-4c3d-909e-1eabf16f0473-config\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110531 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6snr\" (UniqueName: \"kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110588 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5stv\" (UniqueName: \"kubernetes.io/projected/df5245a3-0385-4fab-b949-d98964045062-kube-api-access-b5stv\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110619 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-key\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110655 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-mountpoint-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110682 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7vmz\" (UniqueName: \"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-kube-api-access-k7vmz\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110767 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff466\" (UniqueName: \"kubernetes.io/projected/4110da68-3e86-428e-983e-e01ee898cf2c-kube-api-access-ff466\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110855 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42lxj\" (UniqueName: \"kubernetes.io/projected/f2279937-aa0a-4874-bdd0-b3b1a685ee64-kube-api-access-42lxj\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110888 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-certs\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110918 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110958 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.110998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxsh8\" (UniqueName: \"kubernetes.io/projected/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-kube-api-access-cxsh8\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111107 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111285 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f47ebe12-09aa-476d-a609-f9066a106afb-machine-approver-tls\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111571 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gztll\" (UniqueName: \"kubernetes.io/projected/3ae14a06-9c17-4937-b761-13a2247365cd-kube-api-access-gztll\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111703 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-socket-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111880 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-registration-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.111984 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4110da68-3e86-428e-983e-e01ee898cf2c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc 
kubenswrapper[4756]: I0930 19:33:32.111997 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lvlf\" (UniqueName: \"kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.112365 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9vgb\" (UniqueName: \"kubernetes.io/projected/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-kube-api-access-t9vgb\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.112586 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-trusted-ca\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.112899 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.113036 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.113809 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4qxs\" (UniqueName: \"kubernetes.io/projected/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-kube-api-access-n4qxs\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.113887 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.113936 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7c32461-b400-43b7-829b-4aecbfb73d70-metrics-tls\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.113969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f2279937-aa0a-4874-bdd0-b3b1a685ee64-proxy-tls\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.114075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.114487 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.114824 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-config\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.114929 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115072 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d420445-45ce-42fc-9571-f137dddc8a2e-trusted-ca\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115165 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzt7m\" (UniqueName: \"kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115217 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115414 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-cabundle\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115485 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-config\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115590 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115616 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115678 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf9s9\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115773 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115837 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-trusted-ca\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115894 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115958 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.115992 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-srv-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116034 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/306f83f8-10af-4389-9546-62325fe60d3a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116077 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116117 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-apiservice-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116161 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5r6q\" (UniqueName: \"kubernetes.io/projected/2a7c2d72-7192-4c40-a041-305580c8d0d0-kube-api-access-g5r6q\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116009 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0f26253-f454-4de3-b058-bbbece905a7e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116212 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-config\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.116373 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2bgj\" (UniqueName: \"kubernetes.io/projected/dffc851c-bb5e-4691-9c2d-1dff44276a16-kube-api-access-z2bgj\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.117125 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.118736 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c7827d-51b1-4d41-a16e-c29e249be294-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.118797 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-metrics-certs\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.118862 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-client\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119070 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119113 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-metrics-tls\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119135 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8f68dfd5-c9df-45b1-8987-8ec4053731ff-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119149 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119177 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/baa40fd8-6584-4742-992d-ccc029178c3e-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119447 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119585 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps2r4\" (UniqueName: \"kubernetes.io/projected/3ec8d77c-feb3-49c0-b162-1ee976685255-kube-api-access-ps2r4\") pod \"migrator-59844c95c7-bchvw\" (UID: \"3ec8d77c-feb3-49c0-b162-1ee976685255\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119638 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d116b7ef-bc7d-4f25-8aec-dabad585681a-serving-cert\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119689 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81849a73-fe68-44ea-b5c6-24614310b67a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119903 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f47ebe12-09aa-476d-a609-f9066a106afb-machine-approver-tls\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.119988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-config\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.120056 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.120112 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/306f83f8-10af-4389-9546-62325fe60d3a-config\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.121181 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-service-ca-bundle\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.121262 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7c32461-b400-43b7-829b-4aecbfb73d70-metrics-tls\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.121284 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-service-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.121915 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-service-ca\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122576 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgbqz\" (UniqueName: \"kubernetes.io/projected/52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad-kube-api-access-vgbqz\") pod \"downloads-7954f5f757-7dwjw\" (UID: \"52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad\") " pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122613 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81849a73-fe68-44ea-b5c6-24614310b67a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122648 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-csi-data-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122661 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122670 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/bd2faf96-f859-4c10-871a-6b9ed6f4571b-cert\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122718 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ae14a06-9c17-4937-b761-13a2247365cd-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122742 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.122761 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-stats-auth\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123199 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-serving-cert\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123256 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123331 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf274\" (UniqueName: \"kubernetes.io/projected/86d80f6f-9644-43e7-be90-6e94eaf7d71f-kube-api-access-gf274\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123409 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87fcg\" (UniqueName: \"kubernetes.io/projected/f2c7827d-51b1-4d41-a16e-c29e249be294-kube-api-access-87fcg\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123451 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-service-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123494 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123524 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q62jh\" (UniqueName: \"kubernetes.io/projected/d5e87d60-31f6-441c-9966-573390631165-kube-api-access-q62jh\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123572 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgzt6\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-kube-api-access-cgzt6\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.123607 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4110da68-3e86-428e-983e-e01ee898cf2c-proxy-tls\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.124323 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.624302166 +0000 UTC m=+142.245235653 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.124328 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d116b7ef-bc7d-4f25-8aec-dabad585681a-serving-cert\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.124828 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d116b7ef-bc7d-4f25-8aec-dabad585681a-service-ca-bundle\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.125211 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-etcd-client\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.125492 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81849a73-fe68-44ea-b5c6-24614310b67a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.126078 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81849a73-fe68-44ea-b5c6-24614310b67a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.126842 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ae14a06-9c17-4937-b761-13a2247365cd-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.131206 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.131560 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/4110da68-3e86-428e-983e-e01ee898cf2c-proxy-tls\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.147006 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.167187 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.183366 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpv4j\" (UniqueName: \"kubernetes.io/projected/a7c32461-b400-43b7-829b-4aecbfb73d70-kube-api-access-fpv4j\") pod \"dns-operator-744455d44c-glgxx\" (UID: \"a7c32461-b400-43b7-829b-4aecbfb73d70\") " pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.205465 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbmmx\" (UniqueName: \"kubernetes.io/projected/4d420445-45ce-42fc-9571-f137dddc8a2e-kube-api-access-kbmmx\") pod \"console-operator-58897d9998-w7nzp\" (UID: \"4d420445-45ce-42fc-9571-f137dddc8a2e\") " pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.224812 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225100 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2jvk\" (UniqueName: \"kubernetes.io/projected/cfc57d6f-f884-42ee-8987-b8e92b4fb95b-kube-api-access-j2jvk\") pod \"etcd-operator-b45778765-75ssh\" (UID: \"cfc57d6f-f884-42ee-8987-b8e92b4fb95b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225202 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225243 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-stats-auth\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " 
pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225311 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf274\" (UniqueName: \"kubernetes.io/projected/86d80f6f-9644-43e7-be90-6e94eaf7d71f-kube-api-access-gf274\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225351 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87fcg\" (UniqueName: \"kubernetes.io/projected/f2c7827d-51b1-4d41-a16e-c29e249be294-kube-api-access-87fcg\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225384 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-serving-cert\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.225535 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.725456318 +0000 UTC m=+142.346389845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225641 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q62jh\" (UniqueName: \"kubernetes.io/projected/d5e87d60-31f6-441c-9966-573390631165-kube-api-access-q62jh\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225859 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-srv-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.225962 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f2279937-aa0a-4874-bdd0-b3b1a685ee64-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc 
kubenswrapper[4756]: I0930 19:33:32.226073 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226361 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dd216c3-656c-4c3d-909e-1eabf16f0473-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226492 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-default-certificate\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226586 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226674 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/86d80f6f-9644-43e7-be90-6e94eaf7d71f-metrics-tls\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226833 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.226775 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/306f83f8-10af-4389-9546-62325fe60d3a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.227077 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn98q\" (UniqueName: \"kubernetes.io/projected/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-kube-api-access-jn98q\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.227543 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/f2279937-aa0a-4874-bdd0-b3b1a685ee64-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.227657 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-node-bootstrap-token\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.227856 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-profile-collector-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.228251 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-stats-auth\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.228719 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-srv-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229207 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229480 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl248\" (UniqueName: \"kubernetes.io/projected/baa40fd8-6584-4742-992d-ccc029178c3e-kube-api-access-cl248\") pod \"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229549 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229654 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsf4f\" (UniqueName: \"kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f\") pod 
\"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229711 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2c7827d-51b1-4d41-a16e-c29e249be294-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229771 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd216c3-656c-4c3d-909e-1eabf16f0473-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229816 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86d80f6f-9644-43e7-be90-6e94eaf7d71f-config-volume\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229862 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6jjg\" (UniqueName: \"kubernetes.io/projected/c63224ae-e78e-484b-83e1-2b6432cc1de6-kube-api-access-b6jjg\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229915 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltqrg\" (UniqueName: \"kubernetes.io/projected/bd2faf96-f859-4c10-871a-6b9ed6f4571b-kube-api-access-ltqrg\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.229967 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxmrs\" (UniqueName: \"kubernetes.io/projected/8f68dfd5-c9df-45b1-8987-8ec4053731ff-kube-api-access-xxmrs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230013 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q7j2\" (UniqueName: \"kubernetes.io/projected/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-kube-api-access-8q7j2\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230095 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-config\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc 
kubenswrapper[4756]: I0930 19:33:32.230167 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-webhook-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230262 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d5e87d60-31f6-441c-9966-573390631165-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230314 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-plugins-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230379 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230495 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-tmpfs\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230577 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd216c3-656c-4c3d-909e-1eabf16f0473-config\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230624 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5stv\" (UniqueName: \"kubernetes.io/projected/df5245a3-0385-4fab-b949-d98964045062-kube-api-access-b5stv\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230662 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-key\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230697 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-mountpoint-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230733 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7vmz\" (UniqueName: \"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-kube-api-access-k7vmz\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230781 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42lxj\" (UniqueName: \"kubernetes.io/projected/f2279937-aa0a-4874-bdd0-b3b1a685ee64-kube-api-access-42lxj\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230820 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-certs\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230866 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230925 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-socket-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.230958 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-registration-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231020 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lvlf\" (UniqueName: \"kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc 
kubenswrapper[4756]: I0930 19:33:32.231060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxsh8\" (UniqueName: \"kubernetes.io/projected/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-kube-api-access-cxsh8\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231100 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9vgb\" (UniqueName: \"kubernetes.io/projected/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-kube-api-access-t9vgb\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231138 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-serving-cert\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231164 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f2279937-aa0a-4874-bdd0-b3b1a685ee64-proxy-tls\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231222 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-cabundle\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231306 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-trusted-ca\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231343 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-srv-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231378 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/306f83f8-10af-4389-9546-62325fe60d3a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231457 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-apiservice-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231501 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5r6q\" (UniqueName: \"kubernetes.io/projected/2a7c2d72-7192-4c40-a041-305580c8d0d0-kube-api-access-g5r6q\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231541 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2bgj\" (UniqueName: \"kubernetes.io/projected/dffc851c-bb5e-4691-9c2d-1dff44276a16-kube-api-access-z2bgj\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231542 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-node-bootstrap-token\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231579 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231618 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c7827d-51b1-4d41-a16e-c29e249be294-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231636 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/306f83f8-10af-4389-9546-62325fe60d3a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231654 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-metrics-certs\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231719 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-metrics-tls\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231742 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8f68dfd5-c9df-45b1-8987-8ec4053731ff-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231768 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/baa40fd8-6584-4742-992d-ccc029178c3e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231796 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps2r4\" (UniqueName: \"kubernetes.io/projected/3ec8d77c-feb3-49c0-b162-1ee976685255-kube-api-access-ps2r4\") pod \"migrator-59844c95c7-bchvw\" (UID: \"3ec8d77c-feb3-49c0-b162-1ee976685255\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231827 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/306f83f8-10af-4389-9546-62325fe60d3a-config\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231850 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-service-ca-bundle\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231864 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-profile-collector-cert\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231881 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-csi-data-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.231907 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bd2faf96-f859-4c10-871a-6b9ed6f4571b-cert\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.232607 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.232690 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2c7827d-51b1-4d41-a16e-c29e249be294-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.232710 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86d80f6f-9644-43e7-be90-6e94eaf7d71f-config-volume\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.233122 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-tmpfs\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.233477 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.233683 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.234055 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.234796 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd216c3-656c-4c3d-909e-1eabf16f0473-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.235181 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-config\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.235388 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-default-certificate\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.235548 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-mountpoint-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.235685 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bd2faf96-f859-4c10-871a-6b9ed6f4571b-cert\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.235919 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd216c3-656c-4c3d-909e-1eabf16f0473-config\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.236102 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.236444 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/306f83f8-10af-4389-9546-62325fe60d3a-config\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.236771 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-webhook-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237158 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-service-ca-bundle\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237208 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-trusted-ca\") 
pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237231 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-csi-data-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237435 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-metrics-tls\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237506 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-socket-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.237761 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-registration-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.238175 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f2279937-aa0a-4874-bdd0-b3b1a685ee64-proxy-tls\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.238286 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dffc851c-bb5e-4691-9c2d-1dff44276a16-plugins-dir\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.238809 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-metrics-certs\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.239075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-cabundle\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.239635 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c7827d-51b1-4d41-a16e-c29e249be294-config\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.239694 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-apiservice-cert\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.239756 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2a7c2d72-7192-4c40-a041-305580c8d0d0-certs\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.239873 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/86d80f6f-9644-43e7-be90-6e94eaf7d71f-metrics-tls\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.240613 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/df5245a3-0385-4fab-b949-d98964045062-srv-cert\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.241483 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c63224ae-e78e-484b-83e1-2b6432cc1de6-signing-key\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.241924 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8f68dfd5-c9df-45b1-8987-8ec4053731ff-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.242581 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/baa40fd8-6584-4742-992d-ccc029178c3e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.242870 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.243718 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d5e87d60-31f6-441c-9966-573390631165-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.248484 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxl8h\" (UniqueName: \"kubernetes.io/projected/d116b7ef-bc7d-4f25-8aec-dabad585681a-kube-api-access-wxl8h\") pod \"authentication-operator-69f744f599-tvhtj\" (UID: \"d116b7ef-bc7d-4f25-8aec-dabad585681a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.265174 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8llp\" (UniqueName: \"kubernetes.io/projected/f47ebe12-09aa-476d-a609-f9066a106afb-kube-api-access-r8llp\") pod \"machine-approver-56656f9798-bzmlt\" (UID: \"f47ebe12-09aa-476d-a609-f9066a106afb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.291788 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnbx9\" (UniqueName: \"kubernetes.io/projected/81849a73-fe68-44ea-b5c6-24614310b67a-kube-api-access-wnbx9\") pod \"openshift-apiserver-operator-796bbdcf4f-dwvn8\" (UID: \"81849a73-fe68-44ea-b5c6-24614310b67a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.303175 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.309472 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8h85\" (UniqueName: \"kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85\") pod \"console-f9d7485db-h5cnz\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.326553 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mnz4\" (UniqueName: \"kubernetes.io/projected/8fee247b-3f4b-4816-a0c4-7d3854ebc33b-kube-api-access-2mnz4\") pod \"cluster-samples-operator-665b6dd947-lls25\" (UID: \"8fee247b-3f4b-4816-a0c4-7d3854ebc33b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.333159 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.333530 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-30 19:33:32.833513479 +0000 UTC m=+142.454446956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.346043 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff466\" (UniqueName: \"kubernetes.io/projected/4110da68-3e86-428e-983e-e01ee898cf2c-kube-api-access-ff466\") pod \"machine-config-operator-74547568cd-nd2dk\" (UID: \"4110da68-3e86-428e-983e-e01ee898cf2c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.369459 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6snr\" (UniqueName: \"kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr\") pod \"controller-manager-879f6c89f-mstk5\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.386661 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gztll\" (UniqueName: \"kubernetes.io/projected/3ae14a06-9c17-4937-b761-13a2247365cd-kube-api-access-gztll\") pod \"openshift-config-operator-7777fb866f-mpnp5\" (UID: \"3ae14a06-9c17-4937-b761-13a2247365cd\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.407009 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4qxs\" (UniqueName: \"kubernetes.io/projected/31438140-007d-4e3a-a6d4-d2c0dcaaf7bc-kube-api-access-n4qxs\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nhvz\" (UID: \"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.425107 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.426200 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzt7m\" (UniqueName: \"kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m\") pod \"route-controller-manager-6576b87f9c-zxkfm\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.434464 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.434641 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.934598689 +0000 UTC m=+142.555532176 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.435046 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.435689 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:32.935669887 +0000 UTC m=+142.556603374 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.442958 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.451281 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf9s9\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.471369 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.485654 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgbqz\" (UniqueName: \"kubernetes.io/projected/52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad-kube-api-access-vgbqz\") pod \"downloads-7954f5f757-7dwjw\" (UID: \"52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad\") " pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.488229 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.500921 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.506064 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.507762 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgzt6\" (UniqueName: \"kubernetes.io/projected/e0f26253-f454-4de3-b058-bbbece905a7e-kube-api-access-cgzt6\") pod \"cluster-image-registry-operator-dc59b4c8b-x87xk\" (UID: \"e0f26253-f454-4de3-b058-bbbece905a7e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.510099 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w7nzp"] Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.521849 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.526387 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.532774 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.532874 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q62jh\" (UniqueName: \"kubernetes.io/projected/d5e87d60-31f6-441c-9966-573390631165-kube-api-access-q62jh\") pod \"package-server-manager-789f6589d5-mb2s4\" (UID: \"d5e87d60-31f6-441c-9966-573390631165\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.536383 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.536905 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.036889061 +0000 UTC m=+142.657822538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.539809 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.553696 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.553701 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf274\" (UniqueName: \"kubernetes.io/projected/86d80f6f-9644-43e7-be90-6e94eaf7d71f-kube-api-access-gf274\") pod \"dns-default-jjwh9\" (UID: \"86d80f6f-9644-43e7-be90-6e94eaf7d71f\") " pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.554557 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.568216 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87fcg\" (UniqueName: \"kubernetes.io/projected/f2c7827d-51b1-4d41-a16e-c29e249be294-kube-api-access-87fcg\") pod \"kube-storage-version-migrator-operator-b67b599dd-vtffn\" (UID: \"f2c7827d-51b1-4d41-a16e-c29e249be294\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.577334 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.591956 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f5053dc-916f-4f3f-8aee-83e6d2bfa438-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fv5z8\" (UID: \"8f5053dc-916f-4f3f-8aee-83e6d2bfa438\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.603154 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.610970 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dd216c3-656c-4c3d-909e-1eabf16f0473-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9npql\" (UID: \"2dd216c3-656c-4c3d-909e-1eabf16f0473\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.638473 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn98q\" (UniqueName: \"kubernetes.io/projected/3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71-kube-api-access-jn98q\") pod \"router-default-5444994796-88vsq\" (UID: \"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71\") " pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.638581 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.639567 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.139545182 +0000 UTC m=+142.760478659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.653894 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75ssh"] Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.657609 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.661410 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl248\" (UniqueName: \"kubernetes.io/projected/baa40fd8-6584-4742-992d-ccc029178c3e-kube-api-access-cl248\") pod \"control-plane-machine-set-operator-78cbb6b69f-trj9z\" (UID: \"baa40fd8-6584-4742-992d-ccc029178c3e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.671345 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsf4f\" (UniqueName: \"kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f\") pod \"collect-profiles-29321010-snrpj\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.676451 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.694967 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6jjg\" (UniqueName: \"kubernetes.io/projected/c63224ae-e78e-484b-83e1-2b6432cc1de6-kube-api-access-b6jjg\") pod \"service-ca-9c57cc56f-nfgwx\" (UID: \"c63224ae-e78e-484b-83e1-2b6432cc1de6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.695291 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.707031 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.707847 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5stv\" (UniqueName: \"kubernetes.io/projected/df5245a3-0385-4fab-b949-d98964045062-kube-api-access-b5stv\") pod \"olm-operator-6b444d44fb-s2485\" (UID: \"df5245a3-0385-4fab-b949-d98964045062\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.724978 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.738915 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltqrg\" (UniqueName: \"kubernetes.io/projected/bd2faf96-f859-4c10-871a-6b9ed6f4571b-kube-api-access-ltqrg\") pod \"ingress-canary-9mtxr\" (UID: \"bd2faf96-f859-4c10-871a-6b9ed6f4571b\") " pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.749349 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxmrs\" (UniqueName: \"kubernetes.io/projected/8f68dfd5-c9df-45b1-8987-8ec4053731ff-kube-api-access-xxmrs\") pod \"multus-admission-controller-857f4d67dd-cgk7c\" (UID: \"8f68dfd5-c9df-45b1-8987-8ec4053731ff\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.749575 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9mtxr" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.750249 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.750520 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.250503177 +0000 UTC m=+142.871436654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.750913 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.751250 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.251243386 +0000 UTC m=+142.872176863 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.752813 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-glgxx"] Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.776178 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q7j2\" (UniqueName: \"kubernetes.io/projected/428bbd8f-09ea-45c6-acd6-3ba9bab95aba-kube-api-access-8q7j2\") pod \"packageserver-d55dfcdfc-qsct2\" (UID: \"428bbd8f-09ea-45c6-acd6-3ba9bab95aba\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:32 crc kubenswrapper[4756]: W0930 19:33:32.790640 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfc57d6f_f884_42ee_8987_b8e92b4fb95b.slice/crio-b0b9ea3ca6ac78751a093d2227810b126e1c677b89be88333fb91a26a773fc16 WatchSource:0}: Error finding container b0b9ea3ca6ac78751a093d2227810b126e1c677b89be88333fb91a26a773fc16: Status 404 returned error can't find the container with id b0b9ea3ca6ac78751a093d2227810b126e1c677b89be88333fb91a26a773fc16 Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.809609 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/306f83f8-10af-4389-9546-62325fe60d3a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lmd8p\" (UID: \"306f83f8-10af-4389-9546-62325fe60d3a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.811599 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tvhtj"] Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.814128 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9vgb\" (UniqueName: \"kubernetes.io/projected/69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f-kube-api-access-t9vgb\") pod \"service-ca-operator-777779d784-wdtth\" (UID: \"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.830413 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8"] Sep 30 19:33:32 crc kubenswrapper[4756]: W0930 19:33:32.847540 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fe6ddbb_59ee_4d35_9d3c_309cdd5eba71.slice/crio-298369e5abcb9deb30c4b3f037c57196d908cb21a7b9e4ac6fcf8812a410d8cd WatchSource:0}: Error finding container 298369e5abcb9deb30c4b3f037c57196d908cb21a7b9e4ac6fcf8812a410d8cd: Status 404 returned error can't find the container with id 298369e5abcb9deb30c4b3f037c57196d908cb21a7b9e4ac6fcf8812a410d8cd Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.848283 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-cxsh8\" (UniqueName: \"kubernetes.io/projected/6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522-kube-api-access-cxsh8\") pod \"catalog-operator-68c6474976-2t55f\" (UID: \"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.853446 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.854025 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.35400437 +0000 UTC m=+142.974937847 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.858447 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps2r4\" (UniqueName: \"kubernetes.io/projected/3ec8d77c-feb3-49c0-b162-1ee976685255-kube-api-access-ps2r4\") pod \"migrator-59844c95c7-bchvw\" (UID: \"3ec8d77c-feb3-49c0-b162-1ee976685255\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.873066 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.881997 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7vmz\" (UniqueName: \"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-kube-api-access-k7vmz\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.890057 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.905771 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2bgj\" (UniqueName: \"kubernetes.io/projected/dffc851c-bb5e-4691-9c2d-1dff44276a16-kube-api-access-z2bgj\") pod \"csi-hostpathplugin-zh59x\" (UID: \"dffc851c-bb5e-4691-9c2d-1dff44276a16\") " pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.920929 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.921978 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42lxj\" (UniqueName: \"kubernetes.io/projected/f2279937-aa0a-4874-bdd0-b3b1a685ee64-kube-api-access-42lxj\") pod \"machine-config-controller-84d6567774-jw4c2\" (UID: \"f2279937-aa0a-4874-bdd0-b3b1a685ee64\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.925046 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.932587 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.941937 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.948620 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2abbe765-36c9-4aed-9cd1-f30b0bfaf484-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nlp7l\" (UID: \"2abbe765-36c9-4aed-9cd1-f30b0bfaf484\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.949173 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.957676 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.961241 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.968334 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.969970 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.972191 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" event={"ID":"d116b7ef-bc7d-4f25-8aec-dabad585681a","Type":"ContainerStarted","Data":"9f616bacf3371ecfa7223403420447c83ae3881745da7fe456e2c129a5c2ff5a"} Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.989743 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lvlf\" (UniqueName: \"kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf\") pod \"marketplace-operator-79b997595-42ln4\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:32 crc kubenswrapper[4756]: E0930 19:33:32.991851 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.491811799 +0000 UTC m=+143.112745266 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:32 crc kubenswrapper[4756]: I0930 19:33:32.992756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" event={"ID":"a7c32461-b400-43b7-829b-4aecbfb73d70","Type":"ContainerStarted","Data":"cb8b8a7318816013eb8966984bbff9fe172ec536b1495106e77d7adfdb76682b"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:32.998261 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" event={"ID":"4d420445-45ce-42fc-9571-f137dddc8a2e","Type":"ContainerStarted","Data":"f370966c7737f662db1a0e5d490e38eba358eed4341e2e4c35048f87f5e2b38e"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.001373 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" event={"ID":"f47ebe12-09aa-476d-a609-f9066a106afb","Type":"ContainerStarted","Data":"f6f6b6f68969e4783d208f4177ee7d1b40101e9615c8d421f23648850dbdd19d"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.004501 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.009874 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.010880 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" event={"ID":"cfc57d6f-f884-42ee-8987-b8e92b4fb95b","Type":"ContainerStarted","Data":"b0b9ea3ca6ac78751a093d2227810b126e1c677b89be88333fb91a26a773fc16"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.021345 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-88vsq" event={"ID":"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71","Type":"ContainerStarted","Data":"298369e5abcb9deb30c4b3f037c57196d908cb21a7b9e4ac6fcf8812a410d8cd"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.024653 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.040368 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5r6q\" (UniqueName: \"kubernetes.io/projected/2a7c2d72-7192-4c40-a041-305580c8d0d0-kube-api-access-g5r6q\") pod \"machine-config-server-frglv\" (UID: \"2a7c2d72-7192-4c40-a041-305580c8d0d0\") " pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.042994 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.043739 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.050024 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" event={"ID":"5921345a-df3c-4611-b471-52f946af0c5f","Type":"ContainerStarted","Data":"0eb141a9c79af17dd08456484c707faad9a39890af1e8f017a082ee64aebe5e5"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.050072 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" event={"ID":"5921345a-df3c-4611-b471-52f946af0c5f","Type":"ContainerStarted","Data":"954e1dc201b5a9c45437ea77a5a01bbe7fd16ccd5005e0380f3199660b43bd63"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.063260 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.063467 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.563433508 +0000 UTC m=+143.184366995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.063661 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.064178 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.564066635 +0000 UTC m=+143.185000112 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.098220 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" event={"ID":"07175e3d-c776-4d48-af22-d30a7e9281ef","Type":"ContainerStarted","Data":"06335514a426ec6f4e2dd806445463c0f7beb484eab0dfb87af6bb875e8b43df"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.113251 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" event={"ID":"81849a73-fe68-44ea-b5c6-24614310b67a","Type":"ContainerStarted","Data":"f750d4d63cb309faeab232880213ed38546056ce572cac68fde053232f6c1692"} Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.165141 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.166561 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.666538271 +0000 UTC m=+143.287471748 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.195020 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.209899 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:33 crc kubenswrapper[4756]: W0930 19:33:33.258674 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ae14a06_9c17_4937_b761_13a2247365cd.slice/crio-c2b9d492fe4fdc82ccc25593e7cdeba1d4f855636ff078cc3efb26ae4610d428 WatchSource:0}: Error finding container c2b9d492fe4fdc82ccc25593e7cdeba1d4f855636ff078cc3efb26ae4610d428: Status 404 returned error can't find the container with id c2b9d492fe4fdc82ccc25593e7cdeba1d4f855636ff078cc3efb26ae4610d428 Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.267132 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.267870 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.767858517 +0000 UTC m=+143.388791994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.324230 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-frglv" Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.367876 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.368317 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.868295201 +0000 UTC m=+143.489228678 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.470715 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.471334 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:33.971314511 +0000 UTC m=+143.592248018 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.537992 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7dwjw"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.539598 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.573024 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.573312 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.073287365 +0000 UTC m=+143.694220842 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.573378 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.573733 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.073723796 +0000 UTC m=+143.694657273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.583242 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.587899 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.587949 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.677316 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.677721 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.177680441 +0000 UTC m=+143.798613918 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.678218 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.678688 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.178680266 +0000 UTC m=+143.799613743 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.691942 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.734227 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.739111 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jjwh9"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.741758 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.742924 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.779272 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.779799 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.279752487 +0000 UTC m=+143.900685964 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.780061 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.780776 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.280767233 +0000 UTC m=+143.901700710 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.860595 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.884118 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:33 crc kubenswrapper[4756]: E0930 19:33:33.884491 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.384465531 +0000 UTC m=+144.005399008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.892651 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wdtth"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.900991 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9mtxr"] Sep 30 19:33:33 crc kubenswrapper[4756]: I0930 19:33:33.933219 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.002067 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.002461 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.502445297 +0000 UTC m=+144.123378774 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.103021 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.103284 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.603252191 +0000 UTC m=+144.224185668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.105479 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.105903 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.605890669 +0000 UTC m=+144.226824146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.121525 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7dwjw" event={"ID":"52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad","Type":"ContainerStarted","Data":"c94d2dcec719e349c726c45fcbed2fe86ae50a4764f4817e524b227ce926a433"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.122815 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" event={"ID":"4110da68-3e86-428e-983e-e01ee898cf2c","Type":"ContainerStarted","Data":"4ce1f4bfedbdf034379fc7cace66545394cee79698322f3b191698eba528746e"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.124158 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jjwh9" event={"ID":"86d80f6f-9644-43e7-be90-6e94eaf7d71f","Type":"ContainerStarted","Data":"8e5684f030ba05eaeb7a9d2925ab0fc869f3d85a846f9448d9b4e4aa5f5a5dd5"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.126471 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" event={"ID":"cfc57d6f-f884-42ee-8987-b8e92b4fb95b","Type":"ContainerStarted","Data":"930376abe4086e26fa9d4dcd62d1829a49906a916bd4e7e93200f935cb17ab04"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.128198 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" event={"ID":"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc","Type":"ContainerStarted","Data":"8b7f4fcf884a720825db8465f5fe15f9df7d23ef7925ed5f463335ae5934606d"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.133265 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" event={"ID":"d03605a9-6716-409b-a9b2-f54bc2f8e467","Type":"ContainerStarted","Data":"c71ac6e48ffae355b795592542d7ac87394adfeef20350c215192d9ce275f06f"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.135805 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" event={"ID":"f47ebe12-09aa-476d-a609-f9066a106afb","Type":"ContainerStarted","Data":"3cda0560b5871e73411012ab714b88ef6c73da42fac323ec4fd7acfd2c98a4d0"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.138734 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" event={"ID":"e0f26253-f454-4de3-b058-bbbece905a7e","Type":"ContainerStarted","Data":"029a9eeee8790675fbd15ec3873134b22c2ebb82af912ee0a93ff9918f7a80f2"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.140176 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" event={"ID":"d5e87d60-31f6-441c-9966-573390631165","Type":"ContainerStarted","Data":"1f6479b2ccf4bb9e5c0b60fa43e8967ce431a2ea55ca5bf6bb4ac8873fd51cbc"} Sep 30 19:33:34 crc 
kubenswrapper[4756]: I0930 19:33:34.143556 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" event={"ID":"8fee247b-3f4b-4816-a0c4-7d3854ebc33b","Type":"ContainerStarted","Data":"c654c4a5abac4bc0127bd00e01ee66a299516762418e6414554244afbb0846dd"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.144668 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-frglv" event={"ID":"2a7c2d72-7192-4c40-a041-305580c8d0d0","Type":"ContainerStarted","Data":"f572886749ddeeaa63f5fa8d0e8e9fe759f4cb1c7a364751bd2079ea3360b782"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.145663 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" event={"ID":"f2c7827d-51b1-4d41-a16e-c29e249be294","Type":"ContainerStarted","Data":"d8a9bee817cffd92932626a80aa0366d12ec195193ea111e02689807f6d11765"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.147187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" event={"ID":"d116b7ef-bc7d-4f25-8aec-dabad585681a","Type":"ContainerStarted","Data":"47f62eff00ebec8a8c956609be94c5f0c1d9d8f194eb25a244a749b5554de950"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.148995 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" event={"ID":"4bdd776c-4f43-42e4-ae36-da0ba80eb291","Type":"ContainerStarted","Data":"33db7ba8438a0b31347f3a43620e31c853e48cced06ae5603e0232047e477658"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.152330 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h5cnz" event={"ID":"37240768-1c94-44c3-9772-d4671f7e35bf","Type":"ContainerStarted","Data":"880e034848c0debb953f1f3067f01929986457e0c2fa0c3b8ac0602ba21e57dc"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.155779 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" event={"ID":"4d420445-45ce-42fc-9571-f137dddc8a2e","Type":"ContainerStarted","Data":"a3f1407c5ebbaa4038122540a9784b771d737e0ed2934afc512a78f2dc4029c2"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.155867 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.157740 4756 patch_prober.go:28] interesting pod/console-operator-58897d9998-w7nzp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.157820 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" podUID="4d420445-45ce-42fc-9571-f137dddc8a2e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.160617 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" 
event={"ID":"3ae14a06-9c17-4937-b761-13a2247365cd","Type":"ContainerStarted","Data":"c2b9d492fe4fdc82ccc25593e7cdeba1d4f855636ff078cc3efb26ae4610d428"} Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.207076 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.208362 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.708321214 +0000 UTC m=+144.329254691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.208521 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.209280 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.709265368 +0000 UTC m=+144.330198845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.309448 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.315858 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.81582025 +0000 UTC m=+144.436753727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.414318 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.414723 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:34.914706934 +0000 UTC m=+144.535640411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.460819 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.471671 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" podStartSLOduration=122.471633944 podStartE2EDuration="2m2.471633944s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.471580532 +0000 UTC m=+144.092514009" watchObservedRunningTime="2025-09-30 19:33:34.471633944 +0000 UTC m=+144.092567421" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.513673 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-75ssh" podStartSLOduration=122.513655249 podStartE2EDuration="2m2.513655249s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.508482365 +0000 UTC m=+144.129415832" watchObservedRunningTime="2025-09-30 19:33:34.513655249 +0000 UTC m=+144.134588726" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.514836 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.517264 4756 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.017239151 +0000 UTC m=+144.638172628 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.546614 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.557513 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.566954 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.571364 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-tvhtj" podStartSLOduration=123.571334818 podStartE2EDuration="2m3.571334818s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.553730214 +0000 UTC m=+144.174663691" watchObservedRunningTime="2025-09-30 19:33:34.571334818 +0000 UTC m=+144.192268295" Sep 30 19:33:34 crc kubenswrapper[4756]: W0930 19:33:34.576488 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf5245a3_0385_4fab_b949_d98964045062.slice/crio-3ef9a9dc728c33784b5bfdf044c7202b1301a68952551df373ea4af6b257d705 WatchSource:0}: Error finding container 3ef9a9dc728c33784b5bfdf044c7202b1301a68952551df373ea4af6b257d705: Status 404 returned error can't find the container with id 3ef9a9dc728c33784b5bfdf044c7202b1301a68952551df373ea4af6b257d705 Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.578618 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cgk7c"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.622489 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-ln592" podStartSLOduration=122.622472129 podStartE2EDuration="2m2.622472129s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.585772091 +0000 UTC m=+144.206705568" watchObservedRunningTime="2025-09-30 19:33:34.622472129 +0000 UTC m=+144.243405606" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.625224 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.625508 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.125497597 +0000 UTC m=+144.746431074 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.663289 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" podStartSLOduration=123.663273143 podStartE2EDuration="2m3.663273143s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.623362712 +0000 UTC m=+144.244296189" watchObservedRunningTime="2025-09-30 19:33:34.663273143 +0000 UTC m=+144.284206620" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.664643 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.708712 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" podStartSLOduration=123.708691166 podStartE2EDuration="2m3.708691166s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.676096054 +0000 UTC m=+144.297029531" watchObservedRunningTime="2025-09-30 19:33:34.708691166 +0000 UTC m=+144.329624643" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.715732 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" podStartSLOduration=123.715711547 podStartE2EDuration="2m3.715711547s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:34.70575969 +0000 UTC m=+144.326693177" watchObservedRunningTime="2025-09-30 19:33:34.715711547 +0000 UTC m=+144.336645024" Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.741808 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.762875 4756 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.262825374 +0000 UTC m=+144.883758841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.799725 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfgwx"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.816513 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.835662 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.846724 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.847070 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.347056779 +0000 UTC m=+144.967990256 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: W0930 19:33:34.877840 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ec8d77c_feb3_49c0_b162_1ee976685255.slice/crio-937492c82ae7a985e8df75c389a7753a07972ec09576a74be099818d1556b3a4 WatchSource:0}: Error finding container 937492c82ae7a985e8df75c389a7753a07972ec09576a74be099818d1556b3a4: Status 404 returned error can't find the container with id 937492c82ae7a985e8df75c389a7753a07972ec09576a74be099818d1556b3a4 Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.878544 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.881778 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.888200 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zh59x"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.929434 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2"] Sep 30 19:33:34 crc kubenswrapper[4756]: I0930 19:33:34.949048 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:34 crc kubenswrapper[4756]: E0930 19:33:34.949467 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.449453073 +0000 UTC m=+145.070386550 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:34 crc kubenswrapper[4756]: W0930 19:33:34.978953 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc63224ae_e78e_484b_83e1_2b6432cc1de6.slice/crio-b2c8b8875d40332aa0a1aa6d999f9422ff347944b57d942ce32022219350c765 WatchSource:0}: Error finding container b2c8b8875d40332aa0a1aa6d999f9422ff347944b57d942ce32022219350c765: Status 404 returned error can't find the container with id b2c8b8875d40332aa0a1aa6d999f9422ff347944b57d942ce32022219350c765 Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.050167 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.050557 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.550543444 +0000 UTC m=+145.171476921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.153928 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.154025 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.654007795 +0000 UTC m=+145.274941272 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.154371 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.154896 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.654862068 +0000 UTC m=+145.275795545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.210230 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" event={"ID":"8f68dfd5-c9df-45b1-8987-8ec4053731ff","Type":"ContainerStarted","Data":"4ec7be2f34a55302a0251b44dbe68c4265701c5179c0099148f2e311e87cf353"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.234866 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" event={"ID":"2dd216c3-656c-4c3d-909e-1eabf16f0473","Type":"ContainerStarted","Data":"c947880ce9e9dab3fe0d647c6f72fcc8cb4f6b0789979c0a8943e02327181f82"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.239572 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" event={"ID":"d2f7e436-45ee-4e29-a473-cde6db34524b","Type":"ContainerStarted","Data":"c597cad2f64b3bada01dc2954911a6ba55ec96c762334afa4997069affa44cd0"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.241060 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" event={"ID":"428bbd8f-09ea-45c6-acd6-3ba9bab95aba","Type":"ContainerStarted","Data":"d1fc5fee964f30687b408ba36e17f70208801afc7252db35dc897ac8f90952fe"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.242438 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" event={"ID":"4f7f9fe4-0797-4490-8bc2-c48850eb9f02","Type":"ContainerStarted","Data":"46f1313d518a5e8e2506314a056221c0a9dd6627fa7df89d156c8357fd1be599"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.244020 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" event={"ID":"f47ebe12-09aa-476d-a609-f9066a106afb","Type":"ContainerStarted","Data":"422d803be788e18f11fc6a3b131661992f57449c830c313c2f5f603c839cfd4b"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.255316 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.255757 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.755741883 +0000 UTC m=+145.376675360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.259959 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" event={"ID":"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f","Type":"ContainerStarted","Data":"a26c26acfe0ddb96db60ee06a5eb57f53a61efbdf6a4823bae89e337f96421b1"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.269484 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" event={"ID":"2abbe765-36c9-4aed-9cd1-f30b0bfaf484","Type":"ContainerStarted","Data":"8077bff0023bcbebda47b488f9a7b9e52cc2a11a781750c0d2b8571f1d34a095"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.275620 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" event={"ID":"c63224ae-e78e-484b-83e1-2b6432cc1de6","Type":"ContainerStarted","Data":"b2c8b8875d40332aa0a1aa6d999f9422ff347944b57d942ce32022219350c765"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.297195 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" event={"ID":"d03605a9-6716-409b-a9b2-f54bc2f8e467","Type":"ContainerStarted","Data":"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.298156 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.317041 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" event={"ID":"81849a73-fe68-44ea-b5c6-24614310b67a","Type":"ContainerStarted","Data":"e19fe2af1e148d9a994ca9ffe6029011d868798cd89b968a497f10ce8d8ae769"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.323155 4756 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mstk5 container/controller-manager 
namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.326384 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.327456 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bzmlt" podStartSLOduration=124.327349942 podStartE2EDuration="2m4.327349942s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.308956417 +0000 UTC m=+144.929889904" watchObservedRunningTime="2025-09-30 19:33:35.327349942 +0000 UTC m=+144.948283439" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.364259 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" podStartSLOduration=124.364224004 podStartE2EDuration="2m4.364224004s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.351069414 +0000 UTC m=+144.972002891" watchObservedRunningTime="2025-09-30 19:33:35.364224004 +0000 UTC m=+144.985157481" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.370832 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" event={"ID":"3ec8d77c-feb3-49c0-b162-1ee976685255","Type":"ContainerStarted","Data":"937492c82ae7a985e8df75c389a7753a07972ec09576a74be099818d1556b3a4"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.380150 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.381010 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.880990057 +0000 UTC m=+145.501923524 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.395713 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dwvn8" podStartSLOduration=124.395695267 podStartE2EDuration="2m4.395695267s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.394711281 +0000 UTC m=+145.015644758" watchObservedRunningTime="2025-09-30 19:33:35.395695267 +0000 UTC m=+145.016628744" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.399115 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" event={"ID":"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522","Type":"ContainerStarted","Data":"e56a5eeba6ad03396b2642e3be5584b7ed807e67f8100e471c4abe6df5196bb6"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.399183 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" event={"ID":"6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522","Type":"ContainerStarted","Data":"f6951e1e35b33931ca48795fb88e69d512d3b9a3c243982dd5d416d0ed684bd6"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.399559 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.412751 4756 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2t55f container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.412817 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" podUID="6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.423420 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" event={"ID":"8fee247b-3f4b-4816-a0c4-7d3854ebc33b","Type":"ContainerStarted","Data":"e931b725898c300b5908beee2e7ffd8cbf15e4f9bbe31d8580b5dfcb8c2eceda"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.436509 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h5cnz" event={"ID":"37240768-1c94-44c3-9772-d4671f7e35bf","Type":"ContainerStarted","Data":"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.449852 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.449914 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-frglv" event={"ID":"2a7c2d72-7192-4c40-a041-305580c8d0d0","Type":"ContainerStarted","Data":"390472a715f58a7123eaedb541b439ad3db7c98d33372cb713a778296bdf07e4"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.449947 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.460850 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" event={"ID":"e0f26253-f454-4de3-b058-bbbece905a7e","Type":"ContainerStarted","Data":"45d3ff916d3078fee8d6ff8642003f822c4efe2c333ac7686dcb1a95eb535777"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.461012 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.470069 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-h5cnz" podStartSLOduration=124.470052217 podStartE2EDuration="2m4.470052217s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.467411289 +0000 UTC m=+145.088344766" watchObservedRunningTime="2025-09-30 19:33:35.470052217 +0000 UTC m=+145.090985694" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.471437 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" podStartSLOduration=123.471429453 podStartE2EDuration="2m3.471429453s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.428783761 +0000 UTC m=+145.049717248" watchObservedRunningTime="2025-09-30 19:33:35.471429453 +0000 UTC m=+145.092362930" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.473281 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" event={"ID":"dffc851c-bb5e-4691-9c2d-1dff44276a16","Type":"ContainerStarted","Data":"164b97a2788c2d4d745c8279db38c0e2d411ba369879945ac7de03a1e674feb5"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.482979 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" event={"ID":"31438140-007d-4e3a-a6d4-d2c0dcaaf7bc","Type":"ContainerStarted","Data":"4c9a43195db358288b7a3ac1b83d477e95500ccf9c920a122babd64ee7473803"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.486833 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" event={"ID":"baa40fd8-6584-4742-992d-ccc029178c3e","Type":"ContainerStarted","Data":"def9c4f3444b509eff908b5b0fae2027126153cc57c6f6e64c74cf6f9a521522"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.494838 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.495109 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.995082953 +0000 UTC m=+145.616016420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.496014 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.496205 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9mtxr" event={"ID":"bd2faf96-f859-4c10-871a-6b9ed6f4571b","Type":"ContainerStarted","Data":"0d578e2e6d37fc2c248a9a811858425a774d792a3885ac2717d7e429bb4d39ba"} Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.498648 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:35.998621895 +0000 UTC m=+145.619555382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.514535 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" event={"ID":"306f83f8-10af-4389-9546-62325fe60d3a","Type":"ContainerStarted","Data":"dd2c134ff08387ea35bba18b3f68eefdbf180276df6d1124335f611854fa5d0f"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.522528 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-88vsq" event={"ID":"3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71","Type":"ContainerStarted","Data":"4d57cc87bca44ccc5090ce8129b3205399678fdf91563ed7a1a4a5154931bd72"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.527161 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" event={"ID":"a7c32461-b400-43b7-829b-4aecbfb73d70","Type":"ContainerStarted","Data":"725911a460972ddc4f27ebf4713e82660f6b64d178a985fd039f6edf36e298eb"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.528815 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" event={"ID":"4bdd776c-4f43-42e4-ae36-da0ba80eb291","Type":"ContainerStarted","Data":"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.529837 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.530889 4756 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-zxkfm container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.530929 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.551258 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-frglv" podStartSLOduration=6.551231072 podStartE2EDuration="6.551231072s" podCreationTimestamp="2025-09-30 19:33:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.541243444 +0000 UTC m=+145.162176921" watchObservedRunningTime="2025-09-30 19:33:35.551231072 +0000 UTC m=+145.172164549" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.564052 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" event={"ID":"8f5053dc-916f-4f3f-8aee-83e6d2bfa438","Type":"ContainerStarted","Data":"d0dafedb2a17ba5c558562923fc89d6e9759ad1e6e2ce2607e14ea2c7e669b09"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.564203 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" event={"ID":"8f5053dc-916f-4f3f-8aee-83e6d2bfa438","Type":"ContainerStarted","Data":"0d6c684201b6ff50f6693e15709639e38565440d915ae42fdf7ab82102217400"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.568183 4756 generic.go:334] "Generic (PLEG): container finished" podID="3ae14a06-9c17-4937-b761-13a2247365cd" containerID="ccd153a126e52fc352010c8de1c819dcf6ed30e6a3f05e080c5e45683dcf9cc3" exitCode=0 Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.568309 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" event={"ID":"3ae14a06-9c17-4937-b761-13a2247365cd","Type":"ContainerDied","Data":"ccd153a126e52fc352010c8de1c819dcf6ed30e6a3f05e080c5e45683dcf9cc3"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.579422 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" event={"ID":"4110da68-3e86-428e-983e-e01ee898cf2c","Type":"ContainerStarted","Data":"3d81743d932636b7b28b7f8cd65673456d3df77243e2bfa71818b5f8813d59e9"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.583088 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" event={"ID":"f2c7827d-51b1-4d41-a16e-c29e249be294","Type":"ContainerStarted","Data":"69004f8e9de5608e497479aab93d3dd57a2d661e597ebbf04912e1b29a67fb48"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.593773 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" event={"ID":"df5245a3-0385-4fab-b949-d98964045062","Type":"ContainerStarted","Data":"3ef9a9dc728c33784b5bfdf044c7202b1301a68952551df373ea4af6b257d705"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.594710 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.598610 4756 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s2485 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.598646 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" podUID="df5245a3-0385-4fab-b949-d98964045062" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.604574 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.606603 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.106581122 +0000 UTC m=+145.727514599 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.608768 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" event={"ID":"f2279937-aa0a-4874-bdd0-b3b1a685ee64","Type":"ContainerStarted","Data":"1fd80408d551b0e1fa3f59d4d129d5709f9269586cf2886906d110b601251a5e"} Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.627855 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.628264 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.628453 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.636948 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dwlkd" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.641309 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x87xk" podStartSLOduration=123.641282318 podStartE2EDuration="2m3.641282318s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.588256299 +0000 UTC m=+145.209189786" watchObservedRunningTime="2025-09-30 19:33:35.641282318 +0000 UTC m=+145.262215815" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.642742 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-88vsq" podStartSLOduration=123.642727935 podStartE2EDuration="2m3.642727935s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.637025168 +0000 UTC m=+145.257958675" watchObservedRunningTime="2025-09-30 19:33:35.642727935 +0000 UTC 
m=+145.263661412" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.648074 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-w7nzp" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.665227 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vtffn" podStartSLOduration=123.665172175 podStartE2EDuration="2m3.665172175s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.662940657 +0000 UTC m=+145.283874134" watchObservedRunningTime="2025-09-30 19:33:35.665172175 +0000 UTC m=+145.286105642" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.696886 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.708287 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.709436 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:33:35 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld Sep 30 19:33:35 crc kubenswrapper[4756]: [+]process-running ok Sep 30 19:33:35 crc kubenswrapper[4756]: healthz check failed Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.709477 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.709869 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.209849559 +0000 UTC m=+145.830783036 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.739050 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nhvz" podStartSLOduration=124.739025172 podStartE2EDuration="2m4.739025172s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.725425701 +0000 UTC m=+145.346359188" watchObservedRunningTime="2025-09-30 19:33:35.739025172 +0000 UTC m=+145.359958649" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.757406 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.757845 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.768565 4756 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zlg2j container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]log ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]etcd ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/generic-apiserver-start-informers ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/max-in-flight-filter ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/image.openshift.io-apiserver-caches ok Sep 30 19:33:35 crc kubenswrapper[4756]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Sep 30 19:33:35 crc kubenswrapper[4756]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/project.openshift.io-projectcache ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/openshift.io-startinformers ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/openshift.io-restmapperupdater ok Sep 30 19:33:35 crc kubenswrapper[4756]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 30 19:33:35 crc kubenswrapper[4756]: livez check failed Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.768644 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j" podUID="5921345a-df3c-4611-b471-52f946af0c5f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.771123 4756 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fv5z8" podStartSLOduration=123.77110651 podStartE2EDuration="2m3.77110651s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.762258692 +0000 UTC m=+145.383192179" watchObservedRunningTime="2025-09-30 19:33:35.77110651 +0000 UTC m=+145.392039987" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.810493 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.813362 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.313345591 +0000 UTC m=+145.934279068 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.864691 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9mtxr" podStartSLOduration=5.864674047 podStartE2EDuration="5.864674047s" podCreationTimestamp="2025-09-30 19:33:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.841100628 +0000 UTC m=+145.462034105" watchObservedRunningTime="2025-09-30 19:33:35.864674047 +0000 UTC m=+145.485607524" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.869287 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" podStartSLOduration=123.868256399 podStartE2EDuration="2m3.868256399s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.866852413 +0000 UTC m=+145.487785890" watchObservedRunningTime="2025-09-30 19:33:35.868256399 +0000 UTC m=+145.489189876" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.903121 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" podStartSLOduration=123.903099359 podStartE2EDuration="2m3.903099359s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:35.90237407 +0000 UTC m=+145.523307537" watchObservedRunningTime="2025-09-30 19:33:35.903099359 +0000 UTC 
m=+145.524032836" Sep 30 19:33:35 crc kubenswrapper[4756]: I0930 19:33:35.912538 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:35 crc kubenswrapper[4756]: E0930 19:33:35.913508 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.413490937 +0000 UTC m=+146.034424414 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.004362 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7dwjw" podStartSLOduration=125.004345814 podStartE2EDuration="2m5.004345814s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.00342689 +0000 UTC m=+145.624360377" watchObservedRunningTime="2025-09-30 19:33:36.004345814 +0000 UTC m=+145.625279291" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.016990 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.017300 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.517285188 +0000 UTC m=+146.138218665 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.118465 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.118835 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.61881998 +0000 UTC m=+146.239753457 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.224927 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.225333 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.72531918 +0000 UTC m=+146.346252657 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.326578 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.326959 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.826941654 +0000 UTC m=+146.447875131 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.428695 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.428894 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.928871977 +0000 UTC m=+146.549805454 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.429050 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.429311 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:36.929303618 +0000 UTC m=+146.550237095 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.530284 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.530572 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.030531892 +0000 UTC m=+146.651465369 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.530936 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.531465 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.031455896 +0000 UTC m=+146.652389373 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.632580 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.632988 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.132958547 +0000 UTC m=+146.753892024 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.633176 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.633581 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.133563343 +0000 UTC m=+146.754497000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.635642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" event={"ID":"c63224ae-e78e-484b-83e1-2b6432cc1de6","Type":"ContainerStarted","Data":"24c5c03d6a382707f72f88fd8839318f40a4117196d8aab4ae42fbf09916032b"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.640633 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" event={"ID":"a7c32461-b400-43b7-829b-4aecbfb73d70","Type":"ContainerStarted","Data":"9a9b520b4706fbfe76eb0be4994ebc52059063279c5b18e9bf5865aeab9081ec"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.643088 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" event={"ID":"428bbd8f-09ea-45c6-acd6-3ba9bab95aba","Type":"ContainerStarted","Data":"417da1a11140724ef33ee37476f9de4561e126ecfee80a809b29f744a694283e"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.644114 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.645612 4756 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qsct2 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.645663 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" podUID="428bbd8f-09ea-45c6-acd6-3ba9bab95aba" containerName="packageserver" 
probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.646477 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" event={"ID":"d5e87d60-31f6-441c-9966-573390631165","Type":"ContainerStarted","Data":"fc9a05a51c0c76db9741b636552b05e78a6b06c5487287f83c063535ddde20d8"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.646520 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" event={"ID":"d5e87d60-31f6-441c-9966-573390631165","Type":"ContainerStarted","Data":"6bbb2c0be7b0880bcb77a3391c7d99f1814f99be34fd50c116e12b4f3c17b0b1"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.647145 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.655171 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nfgwx" podStartSLOduration=124.6551447 podStartE2EDuration="2m4.6551447s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.651501526 +0000 UTC m=+146.272435003" watchObservedRunningTime="2025-09-30 19:33:36.6551447 +0000 UTC m=+146.276078177" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.659510 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" event={"ID":"4110da68-3e86-428e-983e-e01ee898cf2c","Type":"ContainerStarted","Data":"b65393a3e634c07711694827a92320a01840012188f6ab5a0df8a48c148d3527"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.664911 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" event={"ID":"df5245a3-0385-4fab-b949-d98964045062","Type":"ContainerStarted","Data":"2904116e0d317715c175f4edf154519e65f41cd8d36ea72b96e738b9c82d05c8"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.666371 4756 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s2485 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.666436 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" podUID="df5245a3-0385-4fab-b949-d98964045062" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.668198 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" event={"ID":"baa40fd8-6584-4742-992d-ccc029178c3e","Type":"ContainerStarted","Data":"b80cfed28caa6fad553ba09a32b413f5b7758952f19dff79a9a91796899ede0c"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.670110 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-canary/ingress-canary-9mtxr" event={"ID":"bd2faf96-f859-4c10-871a-6b9ed6f4571b","Type":"ContainerStarted","Data":"b102b05dc9a597831c4960de8ecb90a821dffe8750447df8f076a55a10313d81"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.673377 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" event={"ID":"d2f7e436-45ee-4e29-a473-cde6db34524b","Type":"ContainerStarted","Data":"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.674556 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.676874 4756 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-42ln4 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.676936 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.677418 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" event={"ID":"8fee247b-3f4b-4816-a0c4-7d3854ebc33b","Type":"ContainerStarted","Data":"d4f02a931336663dad46a40bf513395f0fe60831f940ad726fbc05f6737a36ab"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.684167 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jjwh9" event={"ID":"86d80f6f-9644-43e7-be90-6e94eaf7d71f","Type":"ContainerStarted","Data":"5d534e52805622f1c05c82477d57a3f466cd5749b963334617690645008bb2d8"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.684216 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jjwh9" event={"ID":"86d80f6f-9644-43e7-be90-6e94eaf7d71f","Type":"ContainerStarted","Data":"eabc7c7f1a71d7b106aba5b9d23be54baa9859117e4be966b93f82abd3ee05bd"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.684334 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.689049 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" podStartSLOduration=124.689032795 podStartE2EDuration="2m4.689032795s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.688415209 +0000 UTC m=+146.309348686" watchObservedRunningTime="2025-09-30 19:33:36.689032795 +0000 UTC m=+146.309966272" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.691891 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" 
event={"ID":"69ad14ee-0c64-4ba8-a2b0-15acc76c1d1f","Type":"ContainerStarted","Data":"01690c19926d7cf9513a70451aaea8108238cd855e8008935d6afc3ed2b47497"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.698776 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" event={"ID":"3ec8d77c-feb3-49c0-b162-1ee976685255","Type":"ContainerStarted","Data":"f2615420c96559cab74bc71404834a78ca7b33445cb96aa7fc60e082e3e4fe98"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.698839 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" event={"ID":"3ec8d77c-feb3-49c0-b162-1ee976685255","Type":"ContainerStarted","Data":"a792c5e0224400904494f48dbe29a3ee5fe1ebdfb98780371eda2904b85761ce"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.701714 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:33:36 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld Sep 30 19:33:36 crc kubenswrapper[4756]: [+]process-running ok Sep 30 19:33:36 crc kubenswrapper[4756]: healthz check failed Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.701767 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.708330 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" event={"ID":"8f68dfd5-c9df-45b1-8987-8ec4053731ff","Type":"ContainerStarted","Data":"6d067bf7261e8f3c99f873f3a3341fb1696ed6dd92b9245841723cf803be806b"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.708414 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" event={"ID":"8f68dfd5-c9df-45b1-8987-8ec4053731ff","Type":"ContainerStarted","Data":"e6d1c9af25b158386f22e1aefd8c64b3e56f5efbb735f16b8367d191b9cf8049"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.709614 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-glgxx" podStartSLOduration=124.709597116 podStartE2EDuration="2m4.709597116s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.709558975 +0000 UTC m=+146.330492452" watchObservedRunningTime="2025-09-30 19:33:36.709597116 +0000 UTC m=+146.330530583" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.716871 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" event={"ID":"f2279937-aa0a-4874-bdd0-b3b1a685ee64","Type":"ContainerStarted","Data":"d95e19d39a64871afc32611ab487e990f6b561d273e241466deab96f25d717d1"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.716944 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" 
event={"ID":"f2279937-aa0a-4874-bdd0-b3b1a685ee64","Type":"ContainerStarted","Data":"bf8581a60f66360ac89867c8891fbd5f25ca4a9bdbefbe433945dc690446fd1c"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.726152 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7dwjw" event={"ID":"52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad","Type":"ContainerStarted","Data":"78fbf2a02c416055148a8f54aef6d6fafcd3e4ef11559ade67de90d28a1df0a5"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.726779 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.726814 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.728525 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" event={"ID":"306f83f8-10af-4389-9546-62325fe60d3a","Type":"ContainerStarted","Data":"eb5d443d6b7a87dbcf378d76f92f91deeeb5c92a6d714af00e02728df0a0e7cf"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.734327 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.735584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" event={"ID":"4f7f9fe4-0797-4490-8bc2-c48850eb9f02","Type":"ContainerStarted","Data":"a8c5ebe8c2ce3631ae5ba6c5403872e352b1e5e30e3c6977ceaa595219d3b3ac"} Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.738137 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.238116933 +0000 UTC m=+146.859050410 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.751966 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" event={"ID":"2dd216c3-656c-4c3d-909e-1eabf16f0473","Type":"ContainerStarted","Data":"bdfcc289a5e20f34d86650d792811fce19d96c66fa316cbd68a921cd5343352d"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.757332 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" event={"ID":"3ae14a06-9c17-4937-b761-13a2247365cd","Type":"ContainerStarted","Data":"df7115fe70eb6cabbd51a0fa9078ba97f0242346f29f268722d0907750f70ced"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.758538 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.762634 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" podStartSLOduration=124.762615885 podStartE2EDuration="2m4.762615885s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.760599443 +0000 UTC m=+146.381532930" watchObservedRunningTime="2025-09-30 19:33:36.762615885 +0000 UTC m=+146.383549362" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.779419 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" event={"ID":"2abbe765-36c9-4aed-9cd1-f30b0bfaf484","Type":"ContainerStarted","Data":"3ea55c5c8bf2e5308e61f44094cf0003e84407617577a29214926c9a4307840f"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.779482 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" event={"ID":"2abbe765-36c9-4aed-9cd1-f30b0bfaf484","Type":"ContainerStarted","Data":"639c404b9c1c63615d824c92613d41a3ac5fa3cbc45a51ed914715c90c883e3e"} Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.782022 4756 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2t55f container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.782070 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" podUID="6c24c4b4-2bf5-49f1-ad2b-b146d1dd3522" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.783154 4756 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mstk5 
container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.783283 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.813044 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-jjwh9" podStartSLOduration=7.813029087 podStartE2EDuration="7.813029087s" podCreationTimestamp="2025-09-30 19:33:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.789867419 +0000 UTC m=+146.410800896" watchObservedRunningTime="2025-09-30 19:33:36.813029087 +0000 UTC m=+146.433962564" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.837092 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.838041 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.338015922 +0000 UTC m=+146.958949399 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.842289 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" podStartSLOduration=124.842272182 podStartE2EDuration="2m4.842272182s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.814933436 +0000 UTC m=+146.435866923" watchObservedRunningTime="2025-09-30 19:33:36.842272182 +0000 UTC m=+146.463205659" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.844287 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-trj9z" podStartSLOduration=124.844277154 podStartE2EDuration="2m4.844277154s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.841430601 +0000 UTC m=+146.462364078" watchObservedRunningTime="2025-09-30 19:33:36.844277154 +0000 UTC m=+146.465210631" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.883742 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bchvw" podStartSLOduration=124.883711492 podStartE2EDuration="2m4.883711492s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.875252474 +0000 UTC m=+146.496185961" watchObservedRunningTime="2025-09-30 19:33:36.883711492 +0000 UTC m=+146.504644969" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.899376 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" podStartSLOduration=125.899335356 podStartE2EDuration="2m5.899335356s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.898291939 +0000 UTC m=+146.519225416" watchObservedRunningTime="2025-09-30 19:33:36.899335356 +0000 UTC m=+146.520268833" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.938427 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9npql" podStartSLOduration=124.938373014 podStartE2EDuration="2m4.938373014s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.924922247 +0000 UTC m=+146.545855734" watchObservedRunningTime="2025-09-30 19:33:36.938373014 +0000 UTC m=+146.559306491" Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.947003 4756 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:36 crc kubenswrapper[4756]: E0930 19:33:36.948087 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.448062404 +0000 UTC m=+147.068995881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:36 crc kubenswrapper[4756]: I0930 19:33:36.963310 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" podStartSLOduration=124.963277347 podStartE2EDuration="2m4.963277347s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.953053123 +0000 UTC m=+146.573986600" watchObservedRunningTime="2025-09-30 19:33:36.963277347 +0000 UTC m=+146.584210824" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.002458 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nd2dk" podStartSLOduration=125.002432558 podStartE2EDuration="2m5.002432558s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:36.984431103 +0000 UTC m=+146.605364580" watchObservedRunningTime="2025-09-30 19:33:37.002432558 +0000 UTC m=+146.623366025" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.052298 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.552281426 +0000 UTC m=+147.173214903 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.051880 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.053032 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jw4c2" podStartSLOduration=125.052998334 podStartE2EDuration="2m5.052998334s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.01644411 +0000 UTC m=+146.637377597" watchObservedRunningTime="2025-09-30 19:33:37.052998334 +0000 UTC m=+146.673931811" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.133199 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-cgk7c" podStartSLOduration=125.133169474 podStartE2EDuration="2m5.133169474s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.125929197 +0000 UTC m=+146.746862674" watchObservedRunningTime="2025-09-30 19:33:37.133169474 +0000 UTC m=+146.754102971" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.135169 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5" podStartSLOduration=126.135158656 podStartE2EDuration="2m6.135158656s" podCreationTimestamp="2025-09-30 19:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.09543202 +0000 UTC m=+146.716365507" watchObservedRunningTime="2025-09-30 19:33:37.135158656 +0000 UTC m=+146.756092133" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.154179 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.154499 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.654476115 +0000 UTC m=+147.275409592 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.255680 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.256207 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.756187311 +0000 UTC m=+147.377120788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.283868 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wdtth" podStartSLOduration=125.283822915 podStartE2EDuration="2m5.283822915s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.209569627 +0000 UTC m=+146.830503094" watchObservedRunningTime="2025-09-30 19:33:37.283822915 +0000 UTC m=+146.904756392" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.290024 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lmd8p" podStartSLOduration=125.289986344 podStartE2EDuration="2m5.289986344s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.286117464 +0000 UTC m=+146.907050941" watchObservedRunningTime="2025-09-30 19:33:37.289986344 +0000 UTC m=+146.910919821" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.323921 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.356519 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.357213 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.85718812 +0000 UTC m=+147.478121597 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.401101 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nlp7l" podStartSLOduration=125.401074543 podStartE2EDuration="2m5.401074543s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:37.399103502 +0000 UTC m=+147.020036979" watchObservedRunningTime="2025-09-30 19:33:37.401074543 +0000 UTC m=+147.022008020" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.458265 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.458711 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:37.958695091 +0000 UTC m=+147.579628558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.558963 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.559186 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.059142615 +0000 UTC m=+147.680076092 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.559649 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.559982 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.059968256 +0000 UTC m=+147.680901733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.660155 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.660484 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.160464341 +0000 UTC m=+147.781397818 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.704177 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:33:37 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld Sep 30 19:33:37 crc kubenswrapper[4756]: [+]process-running ok Sep 30 19:33:37 crc kubenswrapper[4756]: healthz check failed Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.704298 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.761622 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.761958 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.261945902 +0000 UTC m=+147.882879379 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.784843 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" event={"ID":"dffc851c-bb5e-4691-9c2d-1dff44276a16","Type":"ContainerStarted","Data":"1f5dc8bbac9bfe082b7b1b5ff100874d221d9108d9c2576d7243b529b9149a4f"} Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.786205 4756 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qsct2 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.786241 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2" podUID="428bbd8f-09ea-45c6-acd6-3ba9bab95aba" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.788004 4756 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-42ln4 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.788044 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.790082 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.790115 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.819644 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.863203 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.863420 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.363371751 +0000 UTC m=+147.984305228 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.863630 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.867657 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2485" Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.870112 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.370083004 +0000 UTC m=+147.991016671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:37 crc kubenswrapper[4756]: I0930 19:33:37.965705 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:37 crc kubenswrapper[4756]: E0930 19:33:37.966104 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.466088304 +0000 UTC m=+148.087021781 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.068967 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.069381 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.569369471 +0000 UTC m=+148.190302948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.172794 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.173104 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.673087469 +0000 UTC m=+148.294020946 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.273959 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.274556 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.774525919 +0000 UTC m=+148.395459396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.375265 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.375548 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.875506027 +0000 UTC m=+148.496439504 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.375625 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.376142 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.876132443 +0000 UTC m=+148.497065920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.476517 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.476711 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.976685929 +0000 UTC m=+148.597619396 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.477119 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.477483 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:38.97746999 +0000 UTC m=+148.598403467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.578121 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.578441 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.078423087 +0000 UTC m=+148.699356564 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.680091 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.680734 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.180706508 +0000 UTC m=+148.801639985 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.700207 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:33:38 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld Sep 30 19:33:38 crc kubenswrapper[4756]: [+]process-running ok Sep 30 19:33:38 crc kubenswrapper[4756]: healthz check failed Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.700300 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.780924 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.781291 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.281261065 +0000 UTC m=+148.902194542 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.781527 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.781963 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.281946613 +0000 UTC m=+148.902880090 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.810195 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" event={"ID":"dffc851c-bb5e-4691-9c2d-1dff44276a16","Type":"ContainerStarted","Data":"40d17743ae3e81643009fa41b6229dee004229f07283ab315a835fc8f1f346da"} Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.810274 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" event={"ID":"dffc851c-bb5e-4691-9c2d-1dff44276a16","Type":"ContainerStarted","Data":"e9cac9c386dc0e8c229d13050e28d00261bf72ecacb58fd5ef855fe7b2b4cede"} Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.813017 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.883237 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.885815 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.385781304 +0000 UTC m=+149.006714781 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.926612 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j6fg8"] Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.928078 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.937916 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.973418 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j6fg8"] Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.985297 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.985346 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.985366 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72vcn\" (UniqueName: \"kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:38 crc kubenswrapper[4756]: I0930 19:33:38.985419 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:38 crc kubenswrapper[4756]: E0930 19:33:38.985724 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.485712375 +0000 UTC m=+149.106645852 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.087130 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.587096383 +0000 UTC m=+149.208029860 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.086978 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.087720 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088674 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088735 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088789 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088820 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-72vcn\" (UniqueName: \"kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088864 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.089047 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.089089 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.088968 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.090521 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.091222 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.591202469 +0000 UTC m=+149.212135946 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.096183 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.101291 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.101617 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.111069 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"] Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.112090 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.115090 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.115854 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.124882 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72vcn\" (UniqueName: \"kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn\") pod \"community-operators-j6fg8\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " pod="openshift-marketplace/community-operators-j6fg8"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.135877 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.175200 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qsct2"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.191371 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.191748 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.191776 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.191822 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tznvq\" (UniqueName: \"kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.192152 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.692124074 +0000 UTC m=+149.313057551 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.260936 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j6fg8"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.305372 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.322308 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.322506 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tznvq\" (UniqueName: \"kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.322814 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.323223 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.823207559 +0000 UTC m=+149.444141036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.308078 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.323855 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.309844 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-js94r"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.324922 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.326258 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-js94r"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.330933 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.344770 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.346161 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.360318 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tznvq\" (UniqueName: \"kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq\") pod \"certified-operators-rqhnz\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.424601 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.424954 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.425039 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7spp\" (UniqueName: \"kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.425073 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.425201 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:39.925177533 +0000 UTC m=+149.546111000 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.467866 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.498978 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-scg2n"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.507707 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.527328 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-scg2n"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529263 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529353 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529378 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7spp\" (UniqueName: \"kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529418 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2whz\" (UniqueName: \"kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529448 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529467 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529517 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.529970 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.536261 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.539897 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.039828683 +0000 UTC m=+149.660762160 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.563596 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7spp\" (UniqueName: \"kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp\") pod \"community-operators-js94r\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.636117 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.636333 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.136305885 +0000 UTC m=+149.757239362 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.636380 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.636566 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.636601 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2whz\" (UniqueName: \"kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.636644 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.637713 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.638054 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.138030289 +0000 UTC m=+149.758963766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.638229 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.668234 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.672697 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2whz\" (UniqueName: \"kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz\") pod \"certified-operators-scg2n\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.736296 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:33:39 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld
Sep 30 19:33:39 crc kubenswrapper[4756]: [+]process-running ok
Sep 30 19:33:39 crc kubenswrapper[4756]: healthz check failed
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.736868 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.737361 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.737616 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.23758995 +0000 UTC m=+149.858523427 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.737769 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.738109 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.238089483 +0000 UTC m=+149.859022960 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.743069 4756 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.840016 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.840300 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.340274582 +0000 UTC m=+149.961208059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.840332 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" event={"ID":"dffc851c-bb5e-4691-9c2d-1dff44276a16","Type":"ContainerStarted","Data":"284bf2b6a2f9487bec8ee09ed22c89fc1791e1e61a8ca19db5b7e9b216695628"}
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.872667 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.921094 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-zh59x" podStartSLOduration=10.921072709 podStartE2EDuration="10.921072709s" podCreationTimestamp="2025-09-30 19:33:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:39.894760359 +0000 UTC m=+149.515693836" watchObservedRunningTime="2025-09-30 19:33:39.921072709 +0000 UTC m=+149.542006186"
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.924988 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j6fg8"]
Sep 30 19:33:39 crc kubenswrapper[4756]: I0930 19:33:39.940921 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:39 crc kubenswrapper[4756]: E0930 19:33:39.941177 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.441165407 +0000 UTC m=+150.062098884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:39 crc kubenswrapper[4756]: W0930 19:33:39.953521 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod252408ae_8514_4b7b_b8c9_348d43fa0323.slice/crio-e78c61f3fe43859670167c7f3b9fba81b5f473d2db13ba1876f9360674555e1b WatchSource:0}: Error finding container e78c61f3fe43859670167c7f3b9fba81b5f473d2db13ba1876f9360674555e1b: Status 404 returned error can't find the container with id e78c61f3fe43859670167c7f3b9fba81b5f473d2db13ba1876f9360674555e1b
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.041710 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:40 crc kubenswrapper[4756]: E0930 19:33:40.041920 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.541866198 +0000 UTC m=+150.162799685 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.044012 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: E0930 19:33:40.044308 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.544294551 +0000 UTC m=+150.165228028 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.104952 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"]
Sep 30 19:33:40 crc kubenswrapper[4756]: W0930 19:33:40.132807 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8badbed7_383d_4cc6_9fa8_aac7c4ac97ac.slice/crio-bf5d94e99509f10cfc5d8f864ec978f52a39a575fcf3972ef405717c3c77afc3 WatchSource:0}: Error finding container bf5d94e99509f10cfc5d8f864ec978f52a39a575fcf3972ef405717c3c77afc3: Status 404 returned error can't find the container with id bf5d94e99509f10cfc5d8f864ec978f52a39a575fcf3972ef405717c3c77afc3
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.145366 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:40 crc kubenswrapper[4756]: E0930 19:33:40.145735 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.6457213 +0000 UTC m=+150.266654767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.249645 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-js94r"]
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.276092 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: E0930 19:33:40.276551 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:33:40.776528388 +0000 UTC m=+150.397461865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wpxq6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.290654 4756 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T19:33:39.743113653Z","Handler":null,"Name":""}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.314014 4756 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.314062 4756 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.365255 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-scg2n"]
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.376601 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.549474 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.579759 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.606785 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
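The failed mount and unmount attempts above all trace back to a single ordering condition: the volume operations for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 run before the kubevirt.io.hostpath-provisioner plugin has registered (registration lands at 19:33:40.314), so each CSI client lookup fails and the operation is requeued with a fixed 500ms backoff until the driver appears. Below is a minimal, self-contained Go sketch of that lookup-then-retry pattern; the type and function names are hypothetical and this is not the kubelet's actual implementation.

package main

import (
	"fmt"
	"sync"
	"time"
)

// registry mimics the kubelet's in-memory table of registered CSI drivers.
type registry struct {
	mu      sync.RWMutex
	drivers map[string]struct{}
}

func (r *registry) register(name string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = struct{}{}
}

// lookup fails with the same wording seen in the log until the driver registers.
func (r *registry) lookup(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if _, ok := r.drivers[name]; !ok {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

func main() {
	reg := &registry{drivers: map[string]struct{}{}}

	// The plugin registers asynchronously, as it does at 19:33:40.314 in the log.
	go func() {
		time.Sleep(1200 * time.Millisecond)
		reg.register("kubevirt.io.hostpath-provisioner")
	}()

	// Each pending volume operation is retried with a fixed 500ms backoff
	// (the "durationBeforeRetry 500ms" in the nestedpendingoperations errors).
	const backoff = 500 * time.Millisecond
	for attempt := 1; ; attempt++ {
		if err := reg.lookup("kubevirt.io.hostpath-provisioner"); err != nil {
			fmt.Printf("attempt %d failed: %v; no retries permitted for %v\n", attempt, err, backoff)
			time.Sleep(backoff)
			continue
		}
		fmt.Printf("attempt %d: driver found, MountDevice can proceed\n", attempt)
		break
	}
}

Run as written, the first couple of attempts fail and the third succeeds, mirroring how the log's mount errors stop immediately after the csi_plugin.go registration entries.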
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.606888 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.650751 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wpxq6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.662039 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.702228 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:33:40 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld
Sep 30 19:33:40 crc kubenswrapper[4756]: [+]process-running ok
Sep 30 19:33:40 crc kubenswrapper[4756]: healthz check failed
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.702279 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.771895 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.791534 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zlg2j"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.878980 4756 generic.go:334] "Generic (PLEG): container finished" podID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerID="c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0" exitCode=0
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.879036 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerDied","Data":"c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.879136 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerStarted","Data":"53445e38c2202f99918186312939b357d7f7337c04f6d43cd3ad37a43a6f1c50"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.888580 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.898914 4756 generic.go:334] "Generic (PLEG): container finished" podID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerID="4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb" exitCode=0
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.899011 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerDied","Data":"4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.899039 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerStarted","Data":"e78c61f3fe43859670167c7f3b9fba81b5f473d2db13ba1876f9360674555e1b"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.919132 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"07c8271310286009b1f7e771251944709c4f482f19806a3a9e044414c44633f3"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.919226 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7ff6c229e0939edbd681474c3b622913977049ed60695207cc8c03721621ea55"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.957657 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6fda6245e5f5eedbf4d47e35dafc2c497038af9bc949d5d99f1da7f1e8c2bf16"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.957709 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"46e8ea4e6f6b6889fcac228e183801e22d4a59a2f08e98e71ec0a8c5c4ca49dd"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.958319 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.988448 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a3e17b41c8dd427b476547bc26d06f54a35c37234ab491cc2cb43fb7e937e800"}
Sep 30 19:33:40 crc kubenswrapper[4756]: I0930 19:33:40.988493 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7a59fa10f18eb1d8edd30409e320961e51b855bf714372b8502be7a354f6e29c"}
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.003799 4756 generic.go:334] "Generic (PLEG): container finished" podID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerID="00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b" exitCode=0
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.003874 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerDied","Data":"00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b"}
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.003902 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerStarted","Data":"bf5d94e99509f10cfc5d8f864ec978f52a39a575fcf3972ef405717c3c77afc3"}
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.016863 4756 generic.go:334] "Generic (PLEG): container finished" podID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerID="f655ee459e63ac1c9098fcd9da8868dbc75d1dd7bf2cb8622c18c745d6431c0e" exitCode=0
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.017032 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerDied","Data":"f655ee459e63ac1c9098fcd9da8868dbc75d1dd7bf2cb8622c18c745d6431c0e"}
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.017090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerStarted","Data":"92dc9e8633cd3b60fe04e4c9aed3b8d2ca45531acbf34360c066bde81f817c7b"}
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.151804 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.152531 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.155763 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.173343 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.198874 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trqvx\" (UniqueName: \"kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.198956 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.198982 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.228584 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.238741 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.271040 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.285146 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.287865 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.287759 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.293246 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.299528 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.299578 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.299610 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trqvx\" (UniqueName: \"kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.299636 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.299655 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.300083 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.302441 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.337011 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trqvx\" (UniqueName: \"kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx\") pod \"redhat-marketplace-vs4g5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.400621 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.400840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.400940 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.419645 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.482136 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.484947 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.494266 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.502645 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jcbz\" (UniqueName: \"kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.502702 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.502797 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.510474 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.538991 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.539079 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.551725 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpnp5"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.604338 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jcbz\" (UniqueName: \"kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.604387 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.604460 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.605066 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.605091 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.618786 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.632937 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jcbz\" (UniqueName: \"kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz\") pod \"redhat-marketplace-pqxlg\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.702591 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:33:41 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld
Sep 30 19:33:41 crc kubenswrapper[4756]: [+]process-running ok
Sep 30 19:33:41 crc kubenswrapper[4756]: healthz check failed
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.702669 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.760287 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"]
Sep 30 19:33:41 crc kubenswrapper[4756]: I0930 19:33:41.802268 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:33:41 crc kubenswrapper[4756]: W0930 19:33:41.835026 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ec2479_5451_47c2_ada6_a659f57092e5.slice/crio-2d727c28cfcc19953a2e0ad4ae5856320cc1a6fa9c4bcd19fc0749cc3bb3c8b3 WatchSource:0}: Error finding container 2d727c28cfcc19953a2e0ad4ae5856320cc1a6fa9c4bcd19fc0749cc3bb3c8b3: Status 404 returned error can't find the container with id 2d727c28cfcc19953a2e0ad4ae5856320cc1a6fa9c4bcd19fc0749cc3bb3c8b3
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.047238 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" event={"ID":"ce702cef-b28d-4fcd-99b0-9961a980d3e6","Type":"ContainerStarted","Data":"c801d9a345fada93537e26288362d5759104548aa511a1ec3f3e39ef03d601e3"}
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.047780 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" event={"ID":"ce702cef-b28d-4fcd-99b0-9961a980d3e6","Type":"ContainerStarted","Data":"50809457fec328570a1c0b7cff90f0c35b928a2f3bf0ad8f14b9f4f0e60b0260"}
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.048144 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.063007 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerStarted","Data":"2d727c28cfcc19953a2e0ad4ae5856320cc1a6fa9c4bcd19fc0749cc3bb3c8b3"}
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.087672 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" podStartSLOduration=130.087651948 podStartE2EDuration="2m10.087651948s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:42.085626136 +0000 UTC m=+151.706559633" watchObservedRunningTime="2025-09-30 19:33:42.087651948 +0000 UTC m=+151.708585425"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.108390 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"]
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.109939 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.112285 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"]
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.114352 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.118987 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpcm2\" (UniqueName: \"kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.119017 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.119066 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.141614 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"]
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.158199 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 19:33:42 crc kubenswrapper[4756]: W0930 19:33:42.180746 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9556a46_a1ed_4d3f_bcbd_7dafe3e6f3dc.slice/crio-ec9e0b7b402023b601c6ecef7f005345c1d58e35a93f2c6a2434a04b25b5a7e6 WatchSource:0}: Error finding container ec9e0b7b402023b601c6ecef7f005345c1d58e35a93f2c6a2434a04b25b5a7e6: Status 404 returned error can't find the container with id ec9e0b7b402023b601c6ecef7f005345c1d58e35a93f2c6a2434a04b25b5a7e6
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.219951 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpcm2\" (UniqueName: \"kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.219998 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.220060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.221224 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.221855 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.256234 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpcm2\" (UniqueName: \"kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2\") pod \"redhat-operators-kvcxz\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.482895 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"]
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.485348 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-btrj8"
Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.488550 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kvcxz" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.493812 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"] Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.534632 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.534704 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.534857 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.534925 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.560352 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.560422 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.563922 4756 patch_prober.go:28] interesting pod/console-f9d7485db-h5cnz container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.563982 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-h5cnz" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.627918 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.628019 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8b67\" (UniqueName: \"kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc 
kubenswrapper[4756]: I0930 19:33:42.628248 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.697073 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.716228 4756 patch_prober.go:28] interesting pod/router-default-5444994796-88vsq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:33:42 crc kubenswrapper[4756]: [-]has-synced failed: reason withheld Sep 30 19:33:42 crc kubenswrapper[4756]: [+]process-running ok Sep 30 19:33:42 crc kubenswrapper[4756]: healthz check failed Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.716934 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-88vsq" podUID="3fe6ddbb-59ee-4d35-9d3c-309cdd5eba71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.729110 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.729201 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.729291 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8b67\" (UniqueName: \"kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.730892 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.731491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.777809 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8b67\" (UniqueName: 
\"kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67\") pod \"redhat-operators-btrj8\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.860777 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"] Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.911671 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:33:42 crc kubenswrapper[4756]: I0930 19:33:42.966740 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2t55f" Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.164841 4756 generic.go:334] "Generic (PLEG): container finished" podID="60ec2479-5451-47c2-ada6-a659f57092e5" containerID="f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35" exitCode=0 Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.166508 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerDied","Data":"f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.188372 4756 generic.go:334] "Generic (PLEG): container finished" podID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerID="6f752b7d412509f199ba517ac1fe26e532d2e28ef1260b90c53bdd187420ce8e" exitCode=0 Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.188597 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerDied","Data":"6f752b7d412509f199ba517ac1fe26e532d2e28ef1260b90c53bdd187420ce8e"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.188690 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerStarted","Data":"ec9e0b7b402023b601c6ecef7f005345c1d58e35a93f2c6a2434a04b25b5a7e6"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.218301 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c3c4ad7-5502-4850-8813-c16a0f44f13c","Type":"ContainerStarted","Data":"5185b9123186a737d8de047433f71dded6eb8d45df96b545d7979cb87a2cea66"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.218356 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c3c4ad7-5502-4850-8813-c16a0f44f13c","Type":"ContainerStarted","Data":"2cc996d75e0f038e79aede511dc7661b21169fef8fdee6e1b47fad58dd6efb9e"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.272914 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerStarted","Data":"d1ad1d7db0365b3832a631bfd01d4bdb449ea5eb56e0b7c226a93d7fb2d3ee7e"} Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.325934 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.325918324 podStartE2EDuration="2.325918324s" podCreationTimestamp="2025-09-30 
19:33:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:33:43.323348228 +0000 UTC m=+152.944281705" watchObservedRunningTime="2025-09-30 19:33:43.325918324 +0000 UTC m=+152.946851801" Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.672413 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"] Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.703286 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:43 crc kubenswrapper[4756]: I0930 19:33:43.706307 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-88vsq" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.283519 4756 generic.go:334] "Generic (PLEG): container finished" podID="1c3c4ad7-5502-4850-8813-c16a0f44f13c" containerID="5185b9123186a737d8de047433f71dded6eb8d45df96b545d7979cb87a2cea66" exitCode=0 Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.283650 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c3c4ad7-5502-4850-8813-c16a0f44f13c","Type":"ContainerDied","Data":"5185b9123186a737d8de047433f71dded6eb8d45df96b545d7979cb87a2cea66"} Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.287845 4756 generic.go:334] "Generic (PLEG): container finished" podID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerID="6219c122cd0d2c30e77b6e3644c3fecd6323bc31c25c8ac35d8fe54c66b08695" exitCode=0 Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.287906 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerDied","Data":"6219c122cd0d2c30e77b6e3644c3fecd6323bc31c25c8ac35d8fe54c66b08695"} Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.287935 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerStarted","Data":"26fe89a0d339d6a7ed0a2d3dc6b32057b069f73d685f2277b0728220047a7937"} Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.291688 4756 generic.go:334] "Generic (PLEG): container finished" podID="700d58ef-f68b-41f1-837a-984f6afffb82" containerID="a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319" exitCode=0 Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.292448 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerDied","Data":"a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319"} Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.595255 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.596171 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.601406 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.601594 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.604475 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.697572 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.697798 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.799285 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.799338 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.799792 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.846174 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:44 crc kubenswrapper[4756]: I0930 19:33:44.945685 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.503315 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.699753 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.839824 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir\") pod \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.839948 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access\") pod \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\" (UID: \"1c3c4ad7-5502-4850-8813-c16a0f44f13c\") " Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.839902 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1c3c4ad7-5502-4850-8813-c16a0f44f13c" (UID: "1c3c4ad7-5502-4850-8813-c16a0f44f13c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.841810 4756 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.862676 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1c3c4ad7-5502-4850-8813-c16a0f44f13c" (UID: "1c3c4ad7-5502-4850-8813-c16a0f44f13c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:33:45 crc kubenswrapper[4756]: I0930 19:33:45.950126 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c3c4ad7-5502-4850-8813-c16a0f44f13c-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.335097 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c3c4ad7-5502-4850-8813-c16a0f44f13c","Type":"ContainerDied","Data":"2cc996d75e0f038e79aede511dc7661b21169fef8fdee6e1b47fad58dd6efb9e"} Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.335640 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cc996d75e0f038e79aede511dc7661b21169fef8fdee6e1b47fad58dd6efb9e" Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.335647 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.374134 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a362a68-3a77-4ad6-a130-8f99546e5e76","Type":"ContainerStarted","Data":"5e4df4abf92d9e8d7fd09a769e12ade344b485588eae844c62f9bb9eec431a4f"} Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.397178 4756 generic.go:334] "Generic (PLEG): container finished" podID="4f7f9fe4-0797-4490-8bc2-c48850eb9f02" containerID="a8c5ebe8c2ce3631ae5ba6c5403872e352b1e5e30e3c6977ceaa595219d3b3ac" exitCode=0 Sep 30 19:33:46 crc kubenswrapper[4756]: I0930 19:33:46.397252 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" event={"ID":"4f7f9fe4-0797-4490-8bc2-c48850eb9f02","Type":"ContainerDied","Data":"a8c5ebe8c2ce3631ae5ba6c5403872e352b1e5e30e3c6977ceaa595219d3b3ac"} Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.413833 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-lls25_8fee247b-3f4b-4816-a0c4-7d3854ebc33b/cluster-samples-operator/0.log" Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.413898 4756 generic.go:334] "Generic (PLEG): container finished" podID="8fee247b-3f4b-4816-a0c4-7d3854ebc33b" containerID="e931b725898c300b5908beee2e7ffd8cbf15e4f9bbe31d8580b5dfcb8c2eceda" exitCode=2 Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.413977 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" event={"ID":"8fee247b-3f4b-4816-a0c4-7d3854ebc33b","Type":"ContainerDied","Data":"e931b725898c300b5908beee2e7ffd8cbf15e4f9bbe31d8580b5dfcb8c2eceda"} Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.414668 4756 scope.go:117] "RemoveContainer" containerID="e931b725898c300b5908beee2e7ffd8cbf15e4f9bbe31d8580b5dfcb8c2eceda" Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.420652 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a362a68-3a77-4ad6-a130-8f99546e5e76","Type":"ContainerStarted","Data":"132b0143cd8a29be2d42307a1826ff5aae470be57d22ba6512a10a891803fe83"} Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.732732 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-jjwh9" Sep 30 19:33:47 crc kubenswrapper[4756]: I0930 19:33:47.871308 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.007428 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume\") pod \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.007505 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsf4f\" (UniqueName: \"kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f\") pod \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.007558 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume\") pod \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\" (UID: \"4f7f9fe4-0797-4490-8bc2-c48850eb9f02\") " Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.009046 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume" (OuterVolumeSpecName: "config-volume") pod "4f7f9fe4-0797-4490-8bc2-c48850eb9f02" (UID: "4f7f9fe4-0797-4490-8bc2-c48850eb9f02"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.021795 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f" (OuterVolumeSpecName: "kube-api-access-gsf4f") pod "4f7f9fe4-0797-4490-8bc2-c48850eb9f02" (UID: "4f7f9fe4-0797-4490-8bc2-c48850eb9f02"). InnerVolumeSpecName "kube-api-access-gsf4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.027248 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4f7f9fe4-0797-4490-8bc2-c48850eb9f02" (UID: "4f7f9fe4-0797-4490-8bc2-c48850eb9f02"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.109084 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsf4f\" (UniqueName: \"kubernetes.io/projected/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-kube-api-access-gsf4f\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.109493 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.109502 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f7f9fe4-0797-4490-8bc2-c48850eb9f02-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.435622 4756 generic.go:334] "Generic (PLEG): container finished" podID="2a362a68-3a77-4ad6-a130-8f99546e5e76" containerID="132b0143cd8a29be2d42307a1826ff5aae470be57d22ba6512a10a891803fe83" exitCode=0 Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.435749 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a362a68-3a77-4ad6-a130-8f99546e5e76","Type":"ContainerDied","Data":"132b0143cd8a29be2d42307a1826ff5aae470be57d22ba6512a10a891803fe83"} Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.441679 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-lls25_8fee247b-3f4b-4816-a0c4-7d3854ebc33b/cluster-samples-operator/0.log" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.441799 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lls25" event={"ID":"8fee247b-3f4b-4816-a0c4-7d3854ebc33b","Type":"ContainerStarted","Data":"37c4d3e9190048bc462fcd7dd9140e953b98b4cccd3cf2071ac7da0ba29f4f2a"} Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.450081 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" event={"ID":"4f7f9fe4-0797-4490-8bc2-c48850eb9f02","Type":"ContainerDied","Data":"46f1313d518a5e8e2506314a056221c0a9dd6627fa7df89d156c8357fd1be599"} Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.450127 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46f1313d518a5e8e2506314a056221c0a9dd6627fa7df89d156c8357fd1be599" Sep 30 19:33:48 crc kubenswrapper[4756]: I0930 19:33:48.450190 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj" Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.533725 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.534283 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.533850 4756 patch_prober.go:28] interesting pod/downloads-7954f5f757-7dwjw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.534374 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7dwjw" podUID="52803098-a1e8-4a45-a2dd-0eb8f6d5b8ad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.604065 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:52 crc kubenswrapper[4756]: I0930 19:33:52.607179 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:33:54 crc kubenswrapper[4756]: I0930 19:33:54.207280 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:54 crc kubenswrapper[4756]: I0930 19:33:54.214045 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c151bbd-9757-44ba-aa11-ff679e841fdc-metrics-certs\") pod \"network-metrics-daemon-b4n45\" (UID: \"6c151bbd-9757-44ba-aa11-ff679e841fdc\") " pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:54 crc kubenswrapper[4756]: I0930 19:33:54.353515 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b4n45" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.291341 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.331673 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir\") pod \"2a362a68-3a77-4ad6-a130-8f99546e5e76\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.331765 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access\") pod \"2a362a68-3a77-4ad6-a130-8f99546e5e76\" (UID: \"2a362a68-3a77-4ad6-a130-8f99546e5e76\") " Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.331971 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2a362a68-3a77-4ad6-a130-8f99546e5e76" (UID: "2a362a68-3a77-4ad6-a130-8f99546e5e76"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.338362 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2a362a68-3a77-4ad6-a130-8f99546e5e76" (UID: "2a362a68-3a77-4ad6-a130-8f99546e5e76"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.433126 4756 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a362a68-3a77-4ad6-a130-8f99546e5e76-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.433177 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a362a68-3a77-4ad6-a130-8f99546e5e76-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.500929 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a362a68-3a77-4ad6-a130-8f99546e5e76","Type":"ContainerDied","Data":"5e4df4abf92d9e8d7fd09a769e12ade344b485588eae844c62f9bb9eec431a4f"} Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.500973 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4df4abf92d9e8d7fd09a769e12ade344b485588eae844c62f9bb9eec431a4f" Sep 30 19:33:56 crc kubenswrapper[4756]: I0930 19:33:56.500993 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:34:00 crc kubenswrapper[4756]: I0930 19:34:00.674844 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:34:02 crc kubenswrapper[4756]: I0930 19:34:02.549801 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7dwjw" Sep 30 19:34:11 crc kubenswrapper[4756]: I0930 19:34:11.538564 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:34:11 crc kubenswrapper[4756]: I0930 19:34:11.538861 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:34:12 crc kubenswrapper[4756]: I0930 19:34:12.680880 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mb2s4" Sep 30 19:34:13 crc kubenswrapper[4756]: E0930 19:34:12.996423 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 19:34:13 crc kubenswrapper[4756]: E0930 19:34:12.996581 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7jcbz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pqxlg_openshift-marketplace(e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc): ErrImagePull: rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:34:13 crc kubenswrapper[4756]: E0930 19:34:12.997779 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pqxlg" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" Sep 30 19:34:16 crc kubenswrapper[4756]: E0930 19:34:16.693305 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pqxlg" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" Sep 30 19:34:16 crc kubenswrapper[4756]: E0930 19:34:16.773996 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 19:34:16 crc kubenswrapper[4756]: E0930 19:34:16.774440 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f7spp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-js94r_openshift-marketplace(b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:34:16 crc kubenswrapper[4756]: E0930 19:34:16.775708 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-js94r" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" Sep 30 19:34:18 crc 
kubenswrapper[4756]: E0930 19:34:18.233308 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-js94r" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" Sep 30 19:34:18 crc kubenswrapper[4756]: E0930 19:34:18.302640 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 19:34:18 crc kubenswrapper[4756]: E0930 19:34:18.302795 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t2whz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-scg2n_openshift-marketplace(aadc6862-7561-4dc2-a788-a4085dc5f4ad): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:34:18 crc kubenswrapper[4756]: E0930 19:34:18.304334 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-scg2n" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" Sep 30 19:34:19 crc kubenswrapper[4756]: I0930 19:34:19.338138 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.086735 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-scg2n" 
podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.181505 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.183289 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tpcm2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kvcxz_openshift-marketplace(700d58ef-f68b-41f1-837a-984f6afffb82): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.184606 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kvcxz" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.195216 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.195348 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f8b67,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-btrj8_openshift-marketplace(b363e863-a59e-4272-9c06-f1a9dc15fc3b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.195874 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.196045 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tznvq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
certified-operators-rqhnz_openshift-marketplace(8badbed7-383d-4cc6-9fa8-aac7c4ac97ac): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.197715 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-rqhnz" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.197759 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-btrj8" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.227924 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.228083 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-trqvx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vs4g5_openshift-marketplace(60ec2479-5451-47c2-ada6-a659f57092e5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.229304 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vs4g5" podUID="60ec2479-5451-47c2-ada6-a659f57092e5"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.238296 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.238489 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-72vcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-j6fg8_openshift-marketplace(252408ae-8514-4b7b-b8c9-348d43fa0323): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.239798 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-j6fg8" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323"
Sep 30 19:34:21 crc kubenswrapper[4756]: I0930 19:34:21.476296 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-b4n45"]
Sep 30 19:34:21 crc kubenswrapper[4756]: W0930 19:34:21.490281 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c151bbd_9757_44ba_aa11_ff679e841fdc.slice/crio-385ad63a1123c4e78de6aeb4481b62a400a608918494d32f2ceac0d31dd2d294 WatchSource:0}: Error finding container 385ad63a1123c4e78de6aeb4481b62a400a608918494d32f2ceac0d31dd2d294: Status 404 returned error can't find the container with id 385ad63a1123c4e78de6aeb4481b62a400a608918494d32f2ceac0d31dd2d294
Sep 30 19:34:21 crc kubenswrapper[4756]: I0930 19:34:21.635884 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b4n45" event={"ID":"6c151bbd-9757-44ba-aa11-ff679e841fdc","Type":"ContainerStarted","Data":"385ad63a1123c4e78de6aeb4481b62a400a608918494d32f2ceac0d31dd2d294"}
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.638860 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-btrj8" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.638908 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-rqhnz" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.638982 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vs4g5" podUID="60ec2479-5451-47c2-ada6-a659f57092e5"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.638942 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-j6fg8" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323"
Sep 30 19:34:21 crc kubenswrapper[4756]: E0930 19:34:21.639039 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kvcxz" podUID="700d58ef-f68b-41f1-837a-984f6afffb82"
Sep 30 19:34:22 crc kubenswrapper[4756]: I0930 19:34:22.643709 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b4n45" event={"ID":"6c151bbd-9757-44ba-aa11-ff679e841fdc","Type":"ContainerStarted","Data":"9374612c0338265c33e4b3735ca44215d138a4386df07a5817c119188a18dfd4"}
Sep 30 19:34:22 crc kubenswrapper[4756]: I0930 19:34:22.644166 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b4n45" event={"ID":"6c151bbd-9757-44ba-aa11-ff679e841fdc","Type":"ContainerStarted","Data":"397546dcdf5215df3becefbbd9fb6baaa819be468e0b50e2d7d0e033107d6bd4"}
Sep 30 19:34:29 crc kubenswrapper[4756]: I0930 19:34:29.147352 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-b4n45" podStartSLOduration=177.147330268 podStartE2EDuration="2m57.147330268s" podCreationTimestamp="2025-09-30 19:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:34:22.668851215 +0000 UTC m=+192.289784732" watchObservedRunningTime="2025-09-30 19:34:29.147330268 +0000 UTC m=+198.768263765"
Sep 30 19:34:30 crc kubenswrapper[4756]: I0930 19:34:30.697639 4756 generic.go:334] "Generic (PLEG): container finished" podID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerID="1e44808232f49beeacafab8d8a107157330d6aa02c8d4cd0f40b3ff4ec706355" exitCode=0
podID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerID="1e44808232f49beeacafab8d8a107157330d6aa02c8d4cd0f40b3ff4ec706355" exitCode=0 Sep 30 19:34:30 crc kubenswrapper[4756]: I0930 19:34:30.697748 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerDied","Data":"1e44808232f49beeacafab8d8a107157330d6aa02c8d4cd0f40b3ff4ec706355"} Sep 30 19:34:31 crc kubenswrapper[4756]: I0930 19:34:31.713782 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerStarted","Data":"8c960371c9838bf1580eb835ab90d48dd8f61042c0e02d709bc4c1b33558a446"} Sep 30 19:34:31 crc kubenswrapper[4756]: I0930 19:34:31.742737 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqxlg" podStartSLOduration=2.661958516 podStartE2EDuration="50.742703526s" podCreationTimestamp="2025-09-30 19:33:41 +0000 UTC" firstStartedPulling="2025-09-30 19:33:43.191141864 +0000 UTC m=+152.812075341" lastFinishedPulling="2025-09-30 19:34:31.271886874 +0000 UTC m=+200.892820351" observedRunningTime="2025-09-30 19:34:31.739382279 +0000 UTC m=+201.360315756" watchObservedRunningTime="2025-09-30 19:34:31.742703526 +0000 UTC m=+201.363637063" Sep 30 19:34:31 crc kubenswrapper[4756]: I0930 19:34:31.803760 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqxlg" Sep 30 19:34:31 crc kubenswrapper[4756]: I0930 19:34:31.803812 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pqxlg" Sep 30 19:34:32 crc kubenswrapper[4756]: I0930 19:34:32.722422 4756 generic.go:334] "Generic (PLEG): container finished" podID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerID="ca0f717af2159d3d5ded90d1b2ae9a39d99f408b3dcdcf1733b9e9972697f7b2" exitCode=0 Sep 30 19:34:32 crc kubenswrapper[4756]: I0930 19:34:32.722565 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerDied","Data":"ca0f717af2159d3d5ded90d1b2ae9a39d99f408b3dcdcf1733b9e9972697f7b2"} Sep 30 19:34:32 crc kubenswrapper[4756]: I0930 19:34:32.950344 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-pqxlg" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="registry-server" probeResult="failure" output=< Sep 30 19:34:32 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s Sep 30 19:34:32 crc kubenswrapper[4756]: > Sep 30 19:34:34 crc kubenswrapper[4756]: I0930 19:34:34.738211 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerStarted","Data":"308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85"} Sep 30 19:34:34 crc kubenswrapper[4756]: I0930 19:34:34.763530 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-js94r" podStartSLOduration=3.233286777 podStartE2EDuration="55.763506363s" podCreationTimestamp="2025-09-30 19:33:39 +0000 UTC" firstStartedPulling="2025-09-30 19:33:41.022662596 +0000 UTC m=+150.643596073" lastFinishedPulling="2025-09-30 19:34:33.552882182 +0000 
UTC m=+203.173815659" observedRunningTime="2025-09-30 19:34:34.760729601 +0000 UTC m=+204.381663108" watchObservedRunningTime="2025-09-30 19:34:34.763506363 +0000 UTC m=+204.384439850" Sep 30 19:34:35 crc kubenswrapper[4756]: I0930 19:34:35.745431 4756 generic.go:334] "Generic (PLEG): container finished" podID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerID="2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb" exitCode=0 Sep 30 19:34:35 crc kubenswrapper[4756]: I0930 19:34:35.745477 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerDied","Data":"2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.755338 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerStarted","Data":"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.757368 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerStarted","Data":"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.759842 4756 generic.go:334] "Generic (PLEG): container finished" podID="60ec2479-5451-47c2-ada6-a659f57092e5" containerID="daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03" exitCode=0 Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.759879 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerDied","Data":"daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.764813 4756 generic.go:334] "Generic (PLEG): container finished" podID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerID="059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb" exitCode=0 Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.764878 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerDied","Data":"059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.766784 4756 generic.go:334] "Generic (PLEG): container finished" podID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerID="af812b9ac360809f5472c71951b17ee78febaafe22b6ce0f6e9a11faa2aa5416" exitCode=0 Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.766837 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerDied","Data":"af812b9ac360809f5472c71951b17ee78febaafe22b6ce0f6e9a11faa2aa5416"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.769437 4756 generic.go:334] "Generic (PLEG): container finished" podID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerID="a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598" exitCode=0 Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.769486 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerDied","Data":"a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598"} Sep 30 19:34:36 crc kubenswrapper[4756]: I0930 19:34:36.778816 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j6fg8" podStartSLOduration=3.4381512770000002 podStartE2EDuration="58.778797159s" podCreationTimestamp="2025-09-30 19:33:38 +0000 UTC" firstStartedPulling="2025-09-30 19:33:40.903589501 +0000 UTC m=+150.524522978" lastFinishedPulling="2025-09-30 19:34:36.244235383 +0000 UTC m=+205.865168860" observedRunningTime="2025-09-30 19:34:36.775605885 +0000 UTC m=+206.396539442" watchObservedRunningTime="2025-09-30 19:34:36.778797159 +0000 UTC m=+206.399730636" Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.777850 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerStarted","Data":"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720"} Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.780640 4756 generic.go:334] "Generic (PLEG): container finished" podID="700d58ef-f68b-41f1-837a-984f6afffb82" containerID="3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce" exitCode=0 Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.780711 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerDied","Data":"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce"} Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.783358 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerStarted","Data":"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8"} Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.785653 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerStarted","Data":"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c"} Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.787771 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerStarted","Data":"3c8480f41026b61a0587615b743761e8b22246967ab7cb0d4e5961f9d6ef1589"} Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.809760 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-scg2n" podStartSLOduration=2.545447984 podStartE2EDuration="58.809741289s" podCreationTimestamp="2025-09-30 19:33:39 +0000 UTC" firstStartedPulling="2025-09-30 19:33:40.888331247 +0000 UTC m=+150.509264714" lastFinishedPulling="2025-09-30 19:34:37.152624542 +0000 UTC m=+206.773558019" observedRunningTime="2025-09-30 19:34:37.804859821 +0000 UTC m=+207.425793298" watchObservedRunningTime="2025-09-30 19:34:37.809741289 +0000 UTC m=+207.430674766" Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.827948 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rqhnz" 
Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.867490 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-btrj8" podStartSLOduration=2.655458894 podStartE2EDuration="55.867471656s" podCreationTimestamp="2025-09-30 19:33:42 +0000 UTC" firstStartedPulling="2025-09-30 19:33:44.289624391 +0000 UTC m=+153.910557858" lastFinishedPulling="2025-09-30 19:34:37.501637143 +0000 UTC m=+207.122570620" observedRunningTime="2025-09-30 19:34:37.863166703 +0000 UTC m=+207.484100180" watchObservedRunningTime="2025-09-30 19:34:37.867471656 +0000 UTC m=+207.488405133"
Sep 30 19:34:37 crc kubenswrapper[4756]: I0930 19:34:37.893129 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vs4g5" podStartSLOduration=2.59294572 podStartE2EDuration="56.89310917s" podCreationTimestamp="2025-09-30 19:33:41 +0000 UTC" firstStartedPulling="2025-09-30 19:33:43.172605285 +0000 UTC m=+152.793538762" lastFinishedPulling="2025-09-30 19:34:37.472768735 +0000 UTC m=+207.093702212" observedRunningTime="2025-09-30 19:34:37.888402556 +0000 UTC m=+207.509336033" watchObservedRunningTime="2025-09-30 19:34:37.89310917 +0000 UTC m=+207.514042797"
Sep 30 19:34:38 crc kubenswrapper[4756]: I0930 19:34:38.794612 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerStarted","Data":"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79"}
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.261620 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j6fg8"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.261663 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j6fg8"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.317681 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j6fg8"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.337142 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kvcxz" podStartSLOduration=3.094910669 podStartE2EDuration="57.337126475s" podCreationTimestamp="2025-09-30 19:33:42 +0000 UTC" firstStartedPulling="2025-09-30 19:33:44.297225908 +0000 UTC m=+153.918159385" lastFinishedPulling="2025-09-30 19:34:38.539441714 +0000 UTC m=+208.160375191" observedRunningTime="2025-09-30 19:34:38.825166942 +0000 UTC m=+208.446100429" watchObservedRunningTime="2025-09-30 19:34:39.337126475 +0000 UTC m=+208.958059972"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.468769 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.469127 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.520963 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rqhnz"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.668373 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.668440 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.713157 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.872450 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-js94r"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.873670 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.873763 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:34:39 crc kubenswrapper[4756]: I0930 19:34:39.917320 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-scg2n"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.510603 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.511261 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.539666 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.539720 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.539753 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.540252 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.540342 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473" gracePeriod=600
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.564099 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.858152 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:34:41 crc kubenswrapper[4756]: I0930 19:34:41.902115 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqxlg"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.489746 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.490091 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kvcxz"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.821494 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473" exitCode=0
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.821580 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473"}
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.821756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4"}
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.890178 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vs4g5"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.913971 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-btrj8"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.914037 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-btrj8"
Sep 30 19:34:42 crc kubenswrapper[4756]: I0930 19:34:42.969837 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-btrj8"
Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.531135 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kvcxz" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="registry-server" probeResult="failure" output=<
Sep 30 19:34:43 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s
Sep 30 19:34:43 crc kubenswrapper[4756]: >
Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.531944 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-js94r"]
Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.532162 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-js94r" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="registry-server" containerID="cri-o://308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85" gracePeriod=2
pod="openshift-marketplace/community-operators-js94r" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="registry-server" containerID="cri-o://308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85" gracePeriod=2 Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.829309 4756 generic.go:334] "Generic (PLEG): container finished" podID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerID="308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85" exitCode=0 Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.829374 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerDied","Data":"308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85"} Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.879066 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.904479 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js94r" Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.958730 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content\") pod \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.958841 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities\") pod \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.958874 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7spp\" (UniqueName: \"kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp\") pod \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\" (UID: \"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac\") " Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.978597 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities" (OuterVolumeSpecName: "utilities") pod "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" (UID: "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:43 crc kubenswrapper[4756]: I0930 19:34:43.993705 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp" (OuterVolumeSpecName: "kube-api-access-f7spp") pod "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" (UID: "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac"). InnerVolumeSpecName "kube-api-access-f7spp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.015912 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" (UID: "b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.060652 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.060713 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.060725 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7spp\" (UniqueName: \"kubernetes.io/projected/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac-kube-api-access-f7spp\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.836046 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js94r" event={"ID":"b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac","Type":"ContainerDied","Data":"92dc9e8633cd3b60fe04e4c9aed3b8d2ca45531acbf34360c066bde81f817c7b"} Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.836090 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js94r" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.836332 4756 scope.go:117] "RemoveContainer" containerID="308f86923d5e0a1fae7224d29b31e20305dfaa58259605bb28e0c6814cfdbe85" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.854428 4756 scope.go:117] "RemoveContainer" containerID="ca0f717af2159d3d5ded90d1b2ae9a39d99f408b3dcdcf1733b9e9972697f7b2" Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.866659 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-js94r"] Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.870075 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-js94r"] Sep 30 19:34:44 crc kubenswrapper[4756]: I0930 19:34:44.893362 4756 scope.go:117] "RemoveContainer" containerID="f655ee459e63ac1c9098fcd9da8868dbc75d1dd7bf2cb8622c18c745d6431c0e" Sep 30 19:34:45 crc kubenswrapper[4756]: I0930 19:34:45.122687 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" path="/var/lib/kubelet/pods/b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac/volumes" Sep 30 19:34:45 crc kubenswrapper[4756]: I0930 19:34:45.934872 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"] Sep 30 19:34:45 crc kubenswrapper[4756]: I0930 19:34:45.935111 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqxlg" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="registry-server" containerID="cri-o://8c960371c9838bf1580eb835ab90d48dd8f61042c0e02d709bc4c1b33558a446" gracePeriod=2 Sep 30 19:34:46 crc kubenswrapper[4756]: I0930 19:34:46.533865 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"] Sep 30 19:34:46 crc kubenswrapper[4756]: I0930 19:34:46.534734 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-btrj8" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="registry-server" 
containerID="cri-o://3c8480f41026b61a0587615b743761e8b22246967ab7cb0d4e5961f9d6ef1589" gracePeriod=2 Sep 30 19:34:46 crc kubenswrapper[4756]: I0930 19:34:46.860991 4756 generic.go:334] "Generic (PLEG): container finished" podID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerID="8c960371c9838bf1580eb835ab90d48dd8f61042c0e02d709bc4c1b33558a446" exitCode=0 Sep 30 19:34:46 crc kubenswrapper[4756]: I0930 19:34:46.861045 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerDied","Data":"8c960371c9838bf1580eb835ab90d48dd8f61042c0e02d709bc4c1b33558a446"} Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.851552 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqxlg" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.871000 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqxlg" event={"ID":"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc","Type":"ContainerDied","Data":"ec9e0b7b402023b601c6ecef7f005345c1d58e35a93f2c6a2434a04b25b5a7e6"} Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.871055 4756 scope.go:117] "RemoveContainer" containerID="8c960371c9838bf1580eb835ab90d48dd8f61042c0e02d709bc4c1b33558a446" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.871065 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqxlg" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.883521 4756 generic.go:334] "Generic (PLEG): container finished" podID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerID="3c8480f41026b61a0587615b743761e8b22246967ab7cb0d4e5961f9d6ef1589" exitCode=0 Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.883561 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerDied","Data":"3c8480f41026b61a0587615b743761e8b22246967ab7cb0d4e5961f9d6ef1589"} Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.899554 4756 scope.go:117] "RemoveContainer" containerID="1e44808232f49beeacafab8d8a107157330d6aa02c8d4cd0f40b3ff4ec706355" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.916615 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content\") pod \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.916698 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities\") pod \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.916772 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jcbz\" (UniqueName: \"kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz\") pod \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\" (UID: \"e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc\") " Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.917986 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities" (OuterVolumeSpecName: "utilities") pod "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" (UID: "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.931082 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" (UID: "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.956215 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz" (OuterVolumeSpecName: "kube-api-access-7jcbz") pod "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" (UID: "e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc"). InnerVolumeSpecName "kube-api-access-7jcbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:34:47 crc kubenswrapper[4756]: I0930 19:34:47.963136 4756 scope.go:117] "RemoveContainer" containerID="6f752b7d412509f199ba517ac1fe26e532d2e28ef1260b90c53bdd187420ce8e" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.018753 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.018795 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.018811 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jcbz\" (UniqueName: \"kubernetes.io/projected/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc-kube-api-access-7jcbz\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.222645 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"] Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.226448 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqxlg"] Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.819347 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.890969 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-btrj8" event={"ID":"b363e863-a59e-4272-9c06-f1a9dc15fc3b","Type":"ContainerDied","Data":"26fe89a0d339d6a7ed0a2d3dc6b32057b069f73d685f2277b0728220047a7937"} Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.891014 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-btrj8" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.891102 4756 scope.go:117] "RemoveContainer" containerID="3c8480f41026b61a0587615b743761e8b22246967ab7cb0d4e5961f9d6ef1589" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.909155 4756 scope.go:117] "RemoveContainer" containerID="af812b9ac360809f5472c71951b17ee78febaafe22b6ce0f6e9a11faa2aa5416" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.923332 4756 scope.go:117] "RemoveContainer" containerID="6219c122cd0d2c30e77b6e3644c3fecd6323bc31c25c8ac35d8fe54c66b08695" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.929807 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content\") pod \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.929856 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8b67\" (UniqueName: \"kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67\") pod \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.929926 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities\") pod \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\" (UID: \"b363e863-a59e-4272-9c06-f1a9dc15fc3b\") " Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.930707 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities" (OuterVolumeSpecName: "utilities") pod "b363e863-a59e-4272-9c06-f1a9dc15fc3b" (UID: "b363e863-a59e-4272-9c06-f1a9dc15fc3b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:48 crc kubenswrapper[4756]: I0930 19:34:48.934887 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67" (OuterVolumeSpecName: "kube-api-access-f8b67") pod "b363e863-a59e-4272-9c06-f1a9dc15fc3b" (UID: "b363e863-a59e-4272-9c06-f1a9dc15fc3b"). InnerVolumeSpecName "kube-api-access-f8b67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.031889 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.031933 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8b67\" (UniqueName: \"kubernetes.io/projected/b363e863-a59e-4272-9c06-f1a9dc15fc3b-kube-api-access-f8b67\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.133424 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" path="/var/lib/kubelet/pods/e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc/volumes" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.303269 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.503160 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rqhnz" Sep 30 19:34:49 crc kubenswrapper[4756]: I0930 19:34:49.920332 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-scg2n" Sep 30 19:34:51 crc kubenswrapper[4756]: I0930 19:34:51.738006 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b363e863-a59e-4272-9c06-f1a9dc15fc3b" (UID: "b363e863-a59e-4272-9c06-f1a9dc15fc3b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:51 crc kubenswrapper[4756]: I0930 19:34:51.769565 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b363e863-a59e-4272-9c06-f1a9dc15fc3b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:51 crc kubenswrapper[4756]: I0930 19:34:51.930471 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"] Sep 30 19:34:51 crc kubenswrapper[4756]: I0930 19:34:51.935162 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-btrj8"] Sep 30 19:34:52 crc kubenswrapper[4756]: I0930 19:34:52.556485 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kvcxz" Sep 30 19:34:52 crc kubenswrapper[4756]: I0930 19:34:52.628160 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kvcxz" Sep 30 19:34:52 crc kubenswrapper[4756]: I0930 19:34:52.935103 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-scg2n"] Sep 30 19:34:52 crc kubenswrapper[4756]: I0930 19:34:52.935906 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-scg2n" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="registry-server" containerID="cri-o://393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720" gracePeriod=2 Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.130561 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" path="/var/lib/kubelet/pods/b363e863-a59e-4272-9c06-f1a9dc15fc3b/volumes" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.397329 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-scg2n" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.593024 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content\") pod \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.593115 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2whz\" (UniqueName: \"kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz\") pod \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.593200 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities\") pod \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\" (UID: \"aadc6862-7561-4dc2-a788-a4085dc5f4ad\") " Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.594656 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities" (OuterVolumeSpecName: "utilities") pod "aadc6862-7561-4dc2-a788-a4085dc5f4ad" (UID: "aadc6862-7561-4dc2-a788-a4085dc5f4ad"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.621036 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz" (OuterVolumeSpecName: "kube-api-access-t2whz") pod "aadc6862-7561-4dc2-a788-a4085dc5f4ad" (UID: "aadc6862-7561-4dc2-a788-a4085dc5f4ad"). InnerVolumeSpecName "kube-api-access-t2whz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.648524 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aadc6862-7561-4dc2-a788-a4085dc5f4ad" (UID: "aadc6862-7561-4dc2-a788-a4085dc5f4ad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.695019 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2whz\" (UniqueName: \"kubernetes.io/projected/aadc6862-7561-4dc2-a788-a4085dc5f4ad-kube-api-access-t2whz\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.695072 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.695084 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadc6862-7561-4dc2-a788-a4085dc5f4ad-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.926842 4756 generic.go:334] "Generic (PLEG): container finished" podID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerID="393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720" exitCode=0 Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.926892 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerDied","Data":"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720"} Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.926928 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-scg2n" event={"ID":"aadc6862-7561-4dc2-a788-a4085dc5f4ad","Type":"ContainerDied","Data":"53445e38c2202f99918186312939b357d7f7337c04f6d43cd3ad37a43a6f1c50"} Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.926949 4756 scope.go:117] "RemoveContainer" containerID="393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.927531 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-scg2n" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.956986 4756 scope.go:117] "RemoveContainer" containerID="a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598" Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.958339 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-scg2n"] Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.962452 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-scg2n"] Sep 30 19:34:53 crc kubenswrapper[4756]: I0930 19:34:53.986921 4756 scope.go:117] "RemoveContainer" containerID="c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.008226 4756 scope.go:117] "RemoveContainer" containerID="393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720" Sep 30 19:34:54 crc kubenswrapper[4756]: E0930 19:34:54.008830 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720\": container with ID starting with 393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720 not found: ID does not exist" containerID="393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.008881 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720"} err="failed to get container status \"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720\": rpc error: code = NotFound desc = could not find container \"393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720\": container with ID starting with 393b1e2da58cd377aa63833da6f9950992302304fc2e6d9e9130b04cd6bb0720 not found: ID does not exist" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.008912 4756 scope.go:117] "RemoveContainer" containerID="a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598" Sep 30 19:34:54 crc kubenswrapper[4756]: E0930 19:34:54.009297 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598\": container with ID starting with a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598 not found: ID does not exist" containerID="a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.009331 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598"} err="failed to get container status \"a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598\": rpc error: code = NotFound desc = could not find container \"a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598\": container with ID starting with a84630af0aa4742efaec5b2fecbcc1793148c905be09568a76de45f9f7559598 not found: ID does not exist" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.009353 4756 scope.go:117] "RemoveContainer" containerID="c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0" Sep 30 19:34:54 crc kubenswrapper[4756]: E0930 19:34:54.009702 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0\": container with ID starting with c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0 not found: ID does not exist" containerID="c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0" Sep 30 19:34:54 crc kubenswrapper[4756]: I0930 19:34:54.009725 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0"} err="failed to get container status \"c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0\": rpc error: code = NotFound desc = could not find container \"c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0\": container with ID starting with c5d2eda457ff897f98b1492f3324d46594aa173c5b3322a7de4484cdb5460ee0 not found: ID does not exist" Sep 30 19:34:55 crc kubenswrapper[4756]: I0930 19:34:55.122192 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" path="/var/lib/kubelet/pods/aadc6862-7561-4dc2-a788-a4085dc5f4ad/volumes" Sep 30 19:35:32 crc kubenswrapper[4756]: I0930 19:35:32.121661 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:35:57 crc kubenswrapper[4756]: I0930 19:35:57.167524 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" podUID="29fa1c29-d959-4928-bff4-550a2928dfef" containerName="oauth-openshift" containerID="cri-o://2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a" gracePeriod=15 Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.129550 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.193829 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-c494796b-6jlzn"] Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194526 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194564 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194607 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194623 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194655 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194674 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194695 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194713 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194741 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fa1c29-d959-4928-bff4-550a2928dfef" containerName="oauth-openshift" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194758 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fa1c29-d959-4928-bff4-550a2928dfef" containerName="oauth-openshift" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194777 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194794 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194827 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194845 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194866 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7f9fe4-0797-4490-8bc2-c48850eb9f02" containerName="collect-profiles" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194880 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7f9fe4-0797-4490-8bc2-c48850eb9f02" containerName="collect-profiles" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194898 4756 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194916 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="extract-utilities" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194936 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c3c4ad7-5502-4850-8813-c16a0f44f13c" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194953 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c3c4ad7-5502-4850-8813-c16a0f44f13c" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.194974 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.194990 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.195011 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195026 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.195045 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a362a68-3a77-4ad6-a130-8f99546e5e76" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195058 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a362a68-3a77-4ad6-a130-8f99546e5e76" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.195074 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195086 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.195104 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195116 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.195132 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195145 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="extract-content" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195346 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7f9fe4-0797-4490-8bc2-c48850eb9f02" containerName="collect-profiles" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195372 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b363e863-a59e-4272-9c06-f1a9dc15fc3b" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195430 4756 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="aadc6862-7561-4dc2-a788-a4085dc5f4ad" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195453 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a362a68-3a77-4ad6-a130-8f99546e5e76" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195469 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b29af6-5e9a-48a8-8aeb-72bc2d9539ac" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195486 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fa1c29-d959-4928-bff4-550a2928dfef" containerName="oauth-openshift" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195506 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9556a46-a1ed-4d3f-bcbd-7dafe3e6f3dc" containerName="registry-server" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.195527 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c3c4ad7-5502-4850-8813-c16a0f44f13c" containerName="pruner" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.196391 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.200538 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c494796b-6jlzn"] Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.305191 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.305869 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.306003 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgkr5\" (UniqueName: \"kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.306233 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.306541 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.307594 4756 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.307696 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.307757 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.307802 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.307848 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.308083 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.308126 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.308164 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.308198 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data\") pod \"29fa1c29-d959-4928-bff4-550a2928dfef\" (UID: \"29fa1c29-d959-4928-bff4-550a2928dfef\") " Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.309724 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.309786 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-service-ca\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.309867 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310032 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310270 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-router-certs\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310355 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-error\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310437 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310143 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310554 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310583 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-dir\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310642 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df57h\" (UniqueName: \"kubernetes.io/projected/4210df1a-a9ae-4d9c-b989-e1e12368d691-kube-api-access-df57h\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310756 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-session\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311191 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-policies\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311267 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311386 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-login\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: 
\"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311570 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311596 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.310642 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.311298 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312018 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312136 4756 generic.go:334] "Generic (PLEG): container finished" podID="29fa1c29-d959-4928-bff4-550a2928dfef" containerID="2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a" exitCode=0 Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312193 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" event={"ID":"29fa1c29-d959-4928-bff4-550a2928dfef","Type":"ContainerDied","Data":"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a"} Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312276 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" event={"ID":"29fa1c29-d959-4928-bff4-550a2928dfef","Type":"ContainerDied","Data":"be742d4b7690edccc58f7d4add6e79b98ec7447d28566b1f7eadd822e58714c9"} Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312297 4756 scope.go:117] "RemoveContainer" containerID="2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.312631 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h5tp5" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.314129 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.314555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5" (OuterVolumeSpecName: "kube-api-access-bgkr5") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "kube-api-access-bgkr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.314583 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.315446 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.315813 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.316550 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.318611 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.319566 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.320333 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "29fa1c29-d959-4928-bff4-550a2928dfef" (UID: "29fa1c29-d959-4928-bff4-550a2928dfef"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.383012 4756 scope.go:117] "RemoveContainer" containerID="2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a" Sep 30 19:35:58 crc kubenswrapper[4756]: E0930 19:35:58.385655 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a\": container with ID starting with 2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a not found: ID does not exist" containerID="2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.385761 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a"} err="failed to get container status \"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a\": rpc error: code = NotFound desc = could not find container \"2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a\": container with ID starting with 2521882fbfd13f07179e6957ce4a8fd9e7bbe9c5f76ea11b73e6bd856015807a not found: ID does not exist" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.412849 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-dir\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.412956 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df57h\" (UniqueName: \"kubernetes.io/projected/4210df1a-a9ae-4d9c-b989-e1e12368d691-kube-api-access-df57h\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.413058 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-session\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" 
Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.412983 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-dir\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.413661 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.413812 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-policies\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.413854 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.413919 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-login\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414049 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-service-ca\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414081 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414119 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414222 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-router-certs\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414254 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-error\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414284 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414422 4756 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414443 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414462 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414481 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414496 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgkr5\" (UniqueName: \"kubernetes.io/projected/29fa1c29-d959-4928-bff4-550a2928dfef-kube-api-access-bgkr5\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414510 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 
19:35:58.414524 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414538 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414553 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414566 4756 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29fa1c29-d959-4928-bff4-550a2928dfef-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414579 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.414594 4756 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa1c29-d959-4928-bff4-550a2928dfef-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.415060 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-audit-policies\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.415379 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.416643 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-session\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.417697 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-service-ca\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.417877 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.419604 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.420289 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-login\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.420512 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.421660 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.421814 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-router-certs\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.422755 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.422997 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4210df1a-a9ae-4d9c-b989-e1e12368d691-v4-0-config-user-template-error\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.442794 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df57h\" (UniqueName: 
\"kubernetes.io/projected/4210df1a-a9ae-4d9c-b989-e1e12368d691-kube-api-access-df57h\") pod \"oauth-openshift-c494796b-6jlzn\" (UID: \"4210df1a-a9ae-4d9c-b989-e1e12368d691\") " pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.515110 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.654726 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.661317 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h5tp5"] Sep 30 19:35:58 crc kubenswrapper[4756]: I0930 19:35:58.796072 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c494796b-6jlzn"] Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.126909 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29fa1c29-d959-4928-bff4-550a2928dfef" path="/var/lib/kubelet/pods/29fa1c29-d959-4928-bff4-550a2928dfef/volumes" Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.322105 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" event={"ID":"4210df1a-a9ae-4d9c-b989-e1e12368d691","Type":"ContainerStarted","Data":"2f985ee7e7a939a56e7bf12342081a1cf12f43bd96582551ea6226d198479fdc"} Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.322157 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" event={"ID":"4210df1a-a9ae-4d9c-b989-e1e12368d691","Type":"ContainerStarted","Data":"99b166f9b9960ccf059527980e3c2d19acb19b4965d4f1dd7f13d9f7f2457038"} Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.323388 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.802260 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" Sep 30 19:35:59 crc kubenswrapper[4756]: I0930 19:35:59.822769 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-c494796b-6jlzn" podStartSLOduration=27.822739269 podStartE2EDuration="27.822739269s" podCreationTimestamp="2025-09-30 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:35:59.346686682 +0000 UTC m=+288.967620209" watchObservedRunningTime="2025-09-30 19:35:59.822739269 +0000 UTC m=+289.443672786" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.431112 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.432170 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rqhnz" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="registry-server" containerID="cri-o://062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c" gracePeriod=30 Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.448422 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-j6fg8"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.448725 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j6fg8" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="registry-server" containerID="cri-o://2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772" gracePeriod=30 Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.463651 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.464617 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" containerID="cri-o://36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385" gracePeriod=30 Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.481321 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.484609 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vs4g5" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="registry-server" containerID="cri-o://1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8" gracePeriod=30 Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.498568 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.499271 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kvcxz" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="registry-server" containerID="cri-o://d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79" gracePeriod=30 Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.506443 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p27l9"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.507909 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.509630 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p27l9"] Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.670501 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.670551 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ddpr\" (UniqueName: \"kubernetes.io/projected/72e24494-4294-4d3e-934e-4a31a0e7601f-kube-api-access-4ddpr\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.670574 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.771661 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.771702 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ddpr\" (UniqueName: \"kubernetes.io/projected/72e24494-4294-4d3e-934e-4a31a0e7601f-kube-api-access-4ddpr\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.771719 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.773271 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.791571 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/72e24494-4294-4d3e-934e-4a31a0e7601f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.793361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ddpr\" (UniqueName: \"kubernetes.io/projected/72e24494-4294-4d3e-934e-4a31a0e7601f-kube-api-access-4ddpr\") pod \"marketplace-operator-79b997595-p27l9\" (UID: \"72e24494-4294-4d3e-934e-4a31a0e7601f\") " pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.822567 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.888275 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqhnz" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.899683 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.948033 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kvcxz" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.958576 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.958707 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vs4g5" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.973824 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities\") pod \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.973888 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content\") pod \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975115 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities" (OuterVolumeSpecName: "utilities") pod "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" (UID: "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975172 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72vcn\" (UniqueName: \"kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn\") pod \"252408ae-8514-4b7b-b8c9-348d43fa0323\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975231 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tznvq\" (UniqueName: \"kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq\") pod \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\" (UID: \"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975274 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities\") pod \"252408ae-8514-4b7b-b8c9-348d43fa0323\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975331 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content\") pod \"252408ae-8514-4b7b-b8c9-348d43fa0323\" (UID: \"252408ae-8514-4b7b-b8c9-348d43fa0323\") " Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.975618 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.977913 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities" (OuterVolumeSpecName: "utilities") pod "252408ae-8514-4b7b-b8c9-348d43fa0323" (UID: "252408ae-8514-4b7b-b8c9-348d43fa0323"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.979184 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq" (OuterVolumeSpecName: "kube-api-access-tznvq") pod "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" (UID: "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac"). InnerVolumeSpecName "kube-api-access-tznvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:36:18 crc kubenswrapper[4756]: I0930 19:36:18.979359 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn" (OuterVolumeSpecName: "kube-api-access-72vcn") pod "252408ae-8514-4b7b-b8c9-348d43fa0323" (UID: "252408ae-8514-4b7b-b8c9-348d43fa0323"). InnerVolumeSpecName "kube-api-access-72vcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.051174 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "252408ae-8514-4b7b-b8c9-348d43fa0323" (UID: "252408ae-8514-4b7b-b8c9-348d43fa0323"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.064947 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" (UID: "8badbed7-383d-4cc6-9fa8-aac7c4ac97ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076281 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpcm2\" (UniqueName: \"kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2\") pod \"700d58ef-f68b-41f1-837a-984f6afffb82\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076366 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics\") pod \"d2f7e436-45ee-4e29-a473-cde6db34524b\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076429 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities\") pod \"60ec2479-5451-47c2-ada6-a659f57092e5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076447 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content\") pod \"60ec2479-5451-47c2-ada6-a659f57092e5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076461 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities\") pod \"700d58ef-f68b-41f1-837a-984f6afffb82\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076490 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content\") pod \"700d58ef-f68b-41f1-837a-984f6afffb82\" (UID: \"700d58ef-f68b-41f1-837a-984f6afffb82\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076510 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lvlf\" (UniqueName: \"kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf\") pod \"d2f7e436-45ee-4e29-a473-cde6db34524b\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076532 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trqvx\" (UniqueName: \"kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx\") pod \"60ec2479-5451-47c2-ada6-a659f57092e5\" (UID: \"60ec2479-5451-47c2-ada6-a659f57092e5\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076561 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca\") pod \"d2f7e436-45ee-4e29-a473-cde6db34524b\" (UID: \"d2f7e436-45ee-4e29-a473-cde6db34524b\") " Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076728 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tznvq\" (UniqueName: \"kubernetes.io/projected/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-kube-api-access-tznvq\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076742 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076750 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/252408ae-8514-4b7b-b8c9-348d43fa0323-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076760 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.076768 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72vcn\" (UniqueName: \"kubernetes.io/projected/252408ae-8514-4b7b-b8c9-348d43fa0323-kube-api-access-72vcn\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.077228 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d2f7e436-45ee-4e29-a473-cde6db34524b" (UID: "d2f7e436-45ee-4e29-a473-cde6db34524b"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.078088 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities" (OuterVolumeSpecName: "utilities") pod "700d58ef-f68b-41f1-837a-984f6afffb82" (UID: "700d58ef-f68b-41f1-837a-984f6afffb82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.079530 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities" (OuterVolumeSpecName: "utilities") pod "60ec2479-5451-47c2-ada6-a659f57092e5" (UID: "60ec2479-5451-47c2-ada6-a659f57092e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.080555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2" (OuterVolumeSpecName: "kube-api-access-tpcm2") pod "700d58ef-f68b-41f1-837a-984f6afffb82" (UID: "700d58ef-f68b-41f1-837a-984f6afffb82"). InnerVolumeSpecName "kube-api-access-tpcm2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.080623 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d2f7e436-45ee-4e29-a473-cde6db34524b" (UID: "d2f7e436-45ee-4e29-a473-cde6db34524b"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.080958 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf" (OuterVolumeSpecName: "kube-api-access-2lvlf") pod "d2f7e436-45ee-4e29-a473-cde6db34524b" (UID: "d2f7e436-45ee-4e29-a473-cde6db34524b"). InnerVolumeSpecName "kube-api-access-2lvlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.081606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx" (OuterVolumeSpecName: "kube-api-access-trqvx") pod "60ec2479-5451-47c2-ada6-a659f57092e5" (UID: "60ec2479-5451-47c2-ada6-a659f57092e5"). InnerVolumeSpecName "kube-api-access-trqvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.105364 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60ec2479-5451-47c2-ada6-a659f57092e5" (UID: "60ec2479-5451-47c2-ada6-a659f57092e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177644 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lvlf\" (UniqueName: \"kubernetes.io/projected/d2f7e436-45ee-4e29-a473-cde6db34524b-kube-api-access-2lvlf\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177669 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trqvx\" (UniqueName: \"kubernetes.io/projected/60ec2479-5451-47c2-ada6-a659f57092e5-kube-api-access-trqvx\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177679 4756 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177691 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpcm2\" (UniqueName: \"kubernetes.io/projected/700d58ef-f68b-41f1-837a-984f6afffb82-kube-api-access-tpcm2\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177711 4756 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2f7e436-45ee-4e29-a473-cde6db34524b-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177722 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177730 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec2479-5451-47c2-ada6-a659f57092e5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.177738 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.183516 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "700d58ef-f68b-41f1-837a-984f6afffb82" (UID: "700d58ef-f68b-41f1-837a-984f6afffb82"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.267647 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p27l9"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.278407 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700d58ef-f68b-41f1-837a-984f6afffb82-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.486447 4756 generic.go:334] "Generic (PLEG): container finished" podID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerID="36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385" exitCode=0 Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.486573 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" event={"ID":"d2f7e436-45ee-4e29-a473-cde6db34524b","Type":"ContainerDied","Data":"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.486582 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.486607 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-42ln4" event={"ID":"d2f7e436-45ee-4e29-a473-cde6db34524b","Type":"ContainerDied","Data":"c597cad2f64b3bada01dc2954911a6ba55ec96c762334afa4997069affa44cd0"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.486647 4756 scope.go:117] "RemoveContainer" containerID="36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.492004 4756 generic.go:334] "Generic (PLEG): container finished" podID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerID="2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772" exitCode=0 Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.492072 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j6fg8" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.492122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerDied","Data":"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.492153 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6fg8" event={"ID":"252408ae-8514-4b7b-b8c9-348d43fa0323","Type":"ContainerDied","Data":"e78c61f3fe43859670167c7f3b9fba81b5f473d2db13ba1876f9360674555e1b"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.495352 4756 generic.go:334] "Generic (PLEG): container finished" podID="700d58ef-f68b-41f1-837a-984f6afffb82" containerID="d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79" exitCode=0 Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.495447 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerDied","Data":"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.495470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kvcxz" event={"ID":"700d58ef-f68b-41f1-837a-984f6afffb82","Type":"ContainerDied","Data":"d1ad1d7db0365b3832a631bfd01d4bdb449ea5eb56e0b7c226a93d7fb2d3ee7e"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.495487 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kvcxz" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.498055 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" event={"ID":"72e24494-4294-4d3e-934e-4a31a0e7601f","Type":"ContainerStarted","Data":"922fb5d763cd579e7296c304a768643b5ff2e204330dd21d88e67c62cc3c4beb"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.498094 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" event={"ID":"72e24494-4294-4d3e-934e-4a31a0e7601f","Type":"ContainerStarted","Data":"92165bb99da7b2b4d7a8350df650a6b68815ae6784bb4c58483756bcb5ef1ad7"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.498139 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.501352 4756 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-p27l9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.501436 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" podUID="72e24494-4294-4d3e-934e-4a31a0e7601f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.503264 4756 scope.go:117] "RemoveContainer" 
containerID="36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505034 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.505265 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385\": container with ID starting with 36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385 not found: ID does not exist" containerID="36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505295 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385"} err="failed to get container status \"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385\": rpc error: code = NotFound desc = could not find container \"36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385\": container with ID starting with 36ef99d9611b62aa39e56c45e5a9cda1cc95c989eb64a847b1f60fd69420d385 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505315 4756 scope.go:117] "RemoveContainer" containerID="2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505210 4756 generic.go:334] "Generic (PLEG): container finished" podID="60ec2479-5451-47c2-ada6-a659f57092e5" containerID="1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8" exitCode=0 Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505388 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerDied","Data":"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505886 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vs4g5" event={"ID":"60ec2479-5451-47c2-ada6-a659f57092e5","Type":"ContainerDied","Data":"2d727c28cfcc19953a2e0ad4ae5856320cc1a6fa9c4bcd19fc0749cc3bb3c8b3"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.505434 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vs4g5" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.510638 4756 generic.go:334] "Generic (PLEG): container finished" podID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerID="062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c" exitCode=0 Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.510677 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerDied","Data":"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.510699 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqhnz" event={"ID":"8badbed7-383d-4cc6-9fa8-aac7c4ac97ac","Type":"ContainerDied","Data":"bf5d94e99509f10cfc5d8f864ec978f52a39a575fcf3972ef405717c3c77afc3"} Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.510754 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqhnz" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.510900 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-42ln4"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.520099 4756 scope.go:117] "RemoveContainer" containerID="2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.529144 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" podStartSLOduration=1.5291287040000001 podStartE2EDuration="1.529128704s" podCreationTimestamp="2025-09-30 19:36:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:36:19.526955394 +0000 UTC m=+309.147888881" watchObservedRunningTime="2025-09-30 19:36:19.529128704 +0000 UTC m=+309.150062181" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.542444 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j6fg8"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.552076 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j6fg8"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.559903 4756 scope.go:117] "RemoveContainer" containerID="4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.560034 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.560459 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kvcxz"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.563385 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.570529 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vs4g5"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.575605 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"] Sep 30 19:36:19 crc 
kubenswrapper[4756]: I0930 19:36:19.577277 4756 scope.go:117] "RemoveContainer" containerID="2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.577765 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772\": container with ID starting with 2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772 not found: ID does not exist" containerID="2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.577791 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772"} err="failed to get container status \"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772\": rpc error: code = NotFound desc = could not find container \"2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772\": container with ID starting with 2008d082827ec508ccabf94fe48ed8cc9d1be0750f56c519a7a8bc4bd053d772 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.577813 4756 scope.go:117] "RemoveContainer" containerID="2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.578100 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb\": container with ID starting with 2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb not found: ID does not exist" containerID="2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.578120 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb"} err="failed to get container status \"2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb\": rpc error: code = NotFound desc = could not find container \"2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb\": container with ID starting with 2ef2f7bbae4e1186bba3ff95867cd91edc2e8d4ab0807cb397d34e12c8476acb not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.578150 4756 scope.go:117] "RemoveContainer" containerID="4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.578343 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb\": container with ID starting with 4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb not found: ID does not exist" containerID="4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.578361 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb"} err="failed to get container status \"4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb\": rpc error: code = NotFound desc = could not find container 
\"4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb\": container with ID starting with 4bb22368b3e1be067c056759243a550de38e0360fc99f890b9d12c518e8dbefb not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.578374 4756 scope.go:117] "RemoveContainer" containerID="d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.580724 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rqhnz"] Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.613564 4756 scope.go:117] "RemoveContainer" containerID="3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.639804 4756 scope.go:117] "RemoveContainer" containerID="a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.705637 4756 scope.go:117] "RemoveContainer" containerID="d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.706221 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79\": container with ID starting with d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79 not found: ID does not exist" containerID="d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.706264 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79"} err="failed to get container status \"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79\": rpc error: code = NotFound desc = could not find container \"d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79\": container with ID starting with d12ac4203bd271091a1bcbf439ee8a044c798b5985d49812af481ab0e9100d79 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.706291 4756 scope.go:117] "RemoveContainer" containerID="3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.706636 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce\": container with ID starting with 3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce not found: ID does not exist" containerID="3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.706681 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce"} err="failed to get container status \"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce\": rpc error: code = NotFound desc = could not find container \"3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce\": container with ID starting with 3b12969983dfb081e6844d0082e79be56cac5b8224693056f3325b7012f107ce not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.706716 4756 scope.go:117] "RemoveContainer" 
containerID="a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.706966 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319\": container with ID starting with a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319 not found: ID does not exist" containerID="a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.706995 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319"} err="failed to get container status \"a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319\": rpc error: code = NotFound desc = could not find container \"a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319\": container with ID starting with a0f283366f8371d89ad5a23a490c64aceb3b81cbee97e11c2c6ca1662db7f319 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.707011 4756 scope.go:117] "RemoveContainer" containerID="1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.723579 4756 scope.go:117] "RemoveContainer" containerID="daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.738629 4756 scope.go:117] "RemoveContainer" containerID="f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.756763 4756 scope.go:117] "RemoveContainer" containerID="1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.757236 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8\": container with ID starting with 1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8 not found: ID does not exist" containerID="1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757270 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8"} err="failed to get container status \"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8\": rpc error: code = NotFound desc = could not find container \"1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8\": container with ID starting with 1a6acbce684b67a7cd18c437f70619a5097d53c42b28fb4276e3c1300cdc7db8 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757295 4756 scope.go:117] "RemoveContainer" containerID="daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.757500 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03\": container with ID starting with daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03 not found: ID does not exist" containerID="daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03" 
Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757527 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03"} err="failed to get container status \"daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03\": rpc error: code = NotFound desc = could not find container \"daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03\": container with ID starting with daec1a35cd4fdaeb3bdc21c3e06425f5fdf5ee663cfcb4bef87a708ca619bb03 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757543 4756 scope.go:117] "RemoveContainer" containerID="f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.757733 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35\": container with ID starting with f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35 not found: ID does not exist" containerID="f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757756 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35"} err="failed to get container status \"f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35\": rpc error: code = NotFound desc = could not find container \"f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35\": container with ID starting with f6058539de0f019c6165eab560cb8d6aa586fe32e016b627b7dc7112a1e80b35 not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.757773 4756 scope.go:117] "RemoveContainer" containerID="062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.772408 4756 scope.go:117] "RemoveContainer" containerID="059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.794069 4756 scope.go:117] "RemoveContainer" containerID="00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.807568 4756 scope.go:117] "RemoveContainer" containerID="062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.807880 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c\": container with ID starting with 062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c not found: ID does not exist" containerID="062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.807917 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c"} err="failed to get container status \"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c\": rpc error: code = NotFound desc = could not find container \"062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c\": container with ID starting with 
062cfacf1dcec337bcf4e7c015f4f2d2aa5d9d615f0880be92ef85f422dd9a9c not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.807944 4756 scope.go:117] "RemoveContainer" containerID="059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.808266 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb\": container with ID starting with 059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb not found: ID does not exist" containerID="059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.808289 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb"} err="failed to get container status \"059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb\": rpc error: code = NotFound desc = could not find container \"059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb\": container with ID starting with 059f08f4f4290aededee955da7af1ebfda19bb301b34058d885bc08af93618fb not found: ID does not exist" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.808302 4756 scope.go:117] "RemoveContainer" containerID="00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b" Sep 30 19:36:19 crc kubenswrapper[4756]: E0930 19:36:19.808627 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b\": container with ID starting with 00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b not found: ID does not exist" containerID="00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b" Sep 30 19:36:19 crc kubenswrapper[4756]: I0930 19:36:19.808646 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b"} err="failed to get container status \"00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b\": rpc error: code = NotFound desc = could not find container \"00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b\": container with ID starting with 00888ea535890c5455e28e0fd50c6dd5f1b4d4d425df59e714788cbb5e1e431b not found: ID does not exist" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.524228 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-p27l9" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650342 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzt5t"] Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650642 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650662 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650680 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="extract-content" Sep 30 
19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650688 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650699 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650706 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650719 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650726 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650739 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650747 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650758 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650765 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650775 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650782 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650792 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650799 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="extract-content" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650809 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650817 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650826 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650833 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650858 4756 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650865 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650874 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650881 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="extract-utilities" Sep 30 19:36:20 crc kubenswrapper[4756]: E0930 19:36:20.650892 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650899 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.650998 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.651014 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.651027 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" containerName="marketplace-operator" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.651039 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.651047 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" containerName="registry-server" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.651775 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.653437 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.662748 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzt5t"] Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.798214 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-catalog-content\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.798445 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-utilities\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.798507 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn4g2\" (UniqueName: \"kubernetes.io/projected/59f8fa32-72a8-4202-839f-545d6b81ea1a-kube-api-access-zn4g2\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.852133 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zmzd7"] Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.854199 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.856212 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.863251 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmzd7"] Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.900668 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-catalog-content\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.900803 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-utilities\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.900840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn4g2\" (UniqueName: \"kubernetes.io/projected/59f8fa32-72a8-4202-839f-545d6b81ea1a-kube-api-access-zn4g2\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.901154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-catalog-content\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.901254 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59f8fa32-72a8-4202-839f-545d6b81ea1a-utilities\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.927387 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn4g2\" (UniqueName: \"kubernetes.io/projected/59f8fa32-72a8-4202-839f-545d6b81ea1a-kube-api-access-zn4g2\") pod \"redhat-marketplace-wzt5t\" (UID: \"59f8fa32-72a8-4202-839f-545d6b81ea1a\") " pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:20 crc kubenswrapper[4756]: I0930 19:36:20.970623 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.002210 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cptr\" (UniqueName: \"kubernetes.io/projected/a2916419-e636-4f43-9b71-e80307c8f0ef-kube-api-access-9cptr\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.002852 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-catalog-content\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.002904 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-utilities\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.104883 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cptr\" (UniqueName: \"kubernetes.io/projected/a2916419-e636-4f43-9b71-e80307c8f0ef-kube-api-access-9cptr\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.104962 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-catalog-content\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.105011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-utilities\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.105613 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-utilities\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.105938 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2916419-e636-4f43-9b71-e80307c8f0ef-catalog-content\") pod \"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.122349 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cptr\" (UniqueName: \"kubernetes.io/projected/a2916419-e636-4f43-9b71-e80307c8f0ef-kube-api-access-9cptr\") pod 
\"certified-operators-zmzd7\" (UID: \"a2916419-e636-4f43-9b71-e80307c8f0ef\") " pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.125208 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="252408ae-8514-4b7b-b8c9-348d43fa0323" path="/var/lib/kubelet/pods/252408ae-8514-4b7b-b8c9-348d43fa0323/volumes" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.126180 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ec2479-5451-47c2-ada6-a659f57092e5" path="/var/lib/kubelet/pods/60ec2479-5451-47c2-ada6-a659f57092e5/volumes" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.126820 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="700d58ef-f68b-41f1-837a-984f6afffb82" path="/var/lib/kubelet/pods/700d58ef-f68b-41f1-837a-984f6afffb82/volumes" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.127969 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8badbed7-383d-4cc6-9fa8-aac7c4ac97ac" path="/var/lib/kubelet/pods/8badbed7-383d-4cc6-9fa8-aac7c4ac97ac/volumes" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.128617 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2f7e436-45ee-4e29-a473-cde6db34524b" path="/var/lib/kubelet/pods/d2f7e436-45ee-4e29-a473-cde6db34524b/volumes" Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.165976 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzt5t"] Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.168605 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:21 crc kubenswrapper[4756]: W0930 19:36:21.170350 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59f8fa32_72a8_4202_839f_545d6b81ea1a.slice/crio-8f705f15dd7667903615f4a3e4d84be60802906836d2c8c51c2d04fc76b141c1 WatchSource:0}: Error finding container 8f705f15dd7667903615f4a3e4d84be60802906836d2c8c51c2d04fc76b141c1: Status 404 returned error can't find the container with id 8f705f15dd7667903615f4a3e4d84be60802906836d2c8c51c2d04fc76b141c1 Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.531594 4756 generic.go:334] "Generic (PLEG): container finished" podID="59f8fa32-72a8-4202-839f-545d6b81ea1a" containerID="61ce099210595925abed1416983078bbc493fadc6fda194568b09cb17dab8040" exitCode=0 Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.531663 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzt5t" event={"ID":"59f8fa32-72a8-4202-839f-545d6b81ea1a","Type":"ContainerDied","Data":"61ce099210595925abed1416983078bbc493fadc6fda194568b09cb17dab8040"} Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.531920 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzt5t" event={"ID":"59f8fa32-72a8-4202-839f-545d6b81ea1a","Type":"ContainerStarted","Data":"8f705f15dd7667903615f4a3e4d84be60802906836d2c8c51c2d04fc76b141c1"} Sep 30 19:36:21 crc kubenswrapper[4756]: I0930 19:36:21.594774 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmzd7"] Sep 30 19:36:21 crc kubenswrapper[4756]: W0930 19:36:21.600078 4756 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2916419_e636_4f43_9b71_e80307c8f0ef.slice/crio-4c5f32fbc03b6dd252449d84a86b7cca89301093ad4752dd7787cec79b523c75 WatchSource:0}: Error finding container 4c5f32fbc03b6dd252449d84a86b7cca89301093ad4752dd7787cec79b523c75: Status 404 returned error can't find the container with id 4c5f32fbc03b6dd252449d84a86b7cca89301093ad4752dd7787cec79b523c75 Sep 30 19:36:22 crc kubenswrapper[4756]: I0930 19:36:22.541733 4756 generic.go:334] "Generic (PLEG): container finished" podID="59f8fa32-72a8-4202-839f-545d6b81ea1a" containerID="ffe008d0cf5657336e0f8cc3406a68c33e8936fa05768be7f15fa29577a3a385" exitCode=0 Sep 30 19:36:22 crc kubenswrapper[4756]: I0930 19:36:22.541778 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzt5t" event={"ID":"59f8fa32-72a8-4202-839f-545d6b81ea1a","Type":"ContainerDied","Data":"ffe008d0cf5657336e0f8cc3406a68c33e8936fa05768be7f15fa29577a3a385"} Sep 30 19:36:22 crc kubenswrapper[4756]: I0930 19:36:22.546626 4756 generic.go:334] "Generic (PLEG): container finished" podID="a2916419-e636-4f43-9b71-e80307c8f0ef" containerID="a1cfc5a87e56fc908b480359296630def448968ed7084860545de1aa8d618fd4" exitCode=0 Sep 30 19:36:22 crc kubenswrapper[4756]: I0930 19:36:22.546662 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmzd7" event={"ID":"a2916419-e636-4f43-9b71-e80307c8f0ef","Type":"ContainerDied","Data":"a1cfc5a87e56fc908b480359296630def448968ed7084860545de1aa8d618fd4"} Sep 30 19:36:22 crc kubenswrapper[4756]: I0930 19:36:22.546686 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmzd7" event={"ID":"a2916419-e636-4f43-9b71-e80307c8f0ef","Type":"ContainerStarted","Data":"4c5f32fbc03b6dd252449d84a86b7cca89301093ad4752dd7787cec79b523c75"} Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.058120 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5jlx7"] Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.060180 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.062015 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.063266 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jlx7"] Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.129094 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r47t\" (UniqueName: \"kubernetes.io/projected/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-kube-api-access-7r47t\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.129147 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-catalog-content\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.129193 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-utilities\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.230637 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-catalog-content\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.230698 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-utilities\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.230786 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r47t\" (UniqueName: \"kubernetes.io/projected/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-kube-api-access-7r47t\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.231151 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-catalog-content\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.231256 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-utilities\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " 
pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.250049 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hjcpm"] Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.250988 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.253010 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.259053 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hjcpm"] Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.263371 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r47t\" (UniqueName: \"kubernetes.io/projected/37aac716-15ee-430e-a9ab-bf0bcd10a1c0-kube-api-access-7r47t\") pod \"redhat-operators-5jlx7\" (UID: \"37aac716-15ee-430e-a9ab-bf0bcd10a1c0\") " pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.331710 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz9mb\" (UniqueName: \"kubernetes.io/projected/094a83f2-0234-4c62-ba77-e622e631e9fc-kube-api-access-tz9mb\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.331857 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-utilities\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.331900 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-catalog-content\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.390692 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.433328 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-catalog-content\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.433612 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz9mb\" (UniqueName: \"kubernetes.io/projected/094a83f2-0234-4c62-ba77-e622e631e9fc-kube-api-access-tz9mb\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.434067 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-catalog-content\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.434686 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-utilities\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.435000 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/094a83f2-0234-4c62-ba77-e622e631e9fc-utilities\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.459678 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz9mb\" (UniqueName: \"kubernetes.io/projected/094a83f2-0234-4c62-ba77-e622e631e9fc-kube-api-access-tz9mb\") pod \"community-operators-hjcpm\" (UID: \"094a83f2-0234-4c62-ba77-e622e631e9fc\") " pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.552451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzt5t" event={"ID":"59f8fa32-72a8-4202-839f-545d6b81ea1a","Type":"ContainerStarted","Data":"4a0a5060b0dbece8b7dc5440dbcc39f0ad6845d8e63c254854550bc317fb5ce2"} Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.581878 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.726217 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzt5t" podStartSLOduration=2.266302438 podStartE2EDuration="3.726167975s" podCreationTimestamp="2025-09-30 19:36:20 +0000 UTC" firstStartedPulling="2025-09-30 19:36:21.534003805 +0000 UTC m=+311.154937272" lastFinishedPulling="2025-09-30 19:36:22.993869292 +0000 UTC m=+312.614802809" observedRunningTime="2025-09-30 19:36:23.570662773 +0000 UTC m=+313.191596250" watchObservedRunningTime="2025-09-30 19:36:23.726167975 +0000 UTC m=+313.347101452" Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.729134 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hjcpm"] Sep 30 19:36:23 crc kubenswrapper[4756]: W0930 19:36:23.735060 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod094a83f2_0234_4c62_ba77_e622e631e9fc.slice/crio-697fd8810accfc1b755f40f5264609a913bbcfeaace8d448564ae40915e86a68 WatchSource:0}: Error finding container 697fd8810accfc1b755f40f5264609a913bbcfeaace8d448564ae40915e86a68: Status 404 returned error can't find the container with id 697fd8810accfc1b755f40f5264609a913bbcfeaace8d448564ae40915e86a68 Sep 30 19:36:23 crc kubenswrapper[4756]: I0930 19:36:23.773829 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jlx7"] Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.557740 4756 generic.go:334] "Generic (PLEG): container finished" podID="37aac716-15ee-430e-a9ab-bf0bcd10a1c0" containerID="615fbae8b2c564cfcb21b5515c2727ab9743d220d14d0c62031f497307127768" exitCode=0 Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.557837 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jlx7" event={"ID":"37aac716-15ee-430e-a9ab-bf0bcd10a1c0","Type":"ContainerDied","Data":"615fbae8b2c564cfcb21b5515c2727ab9743d220d14d0c62031f497307127768"} Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.558908 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jlx7" event={"ID":"37aac716-15ee-430e-a9ab-bf0bcd10a1c0","Type":"ContainerStarted","Data":"54acd22b78d3ad58197715cc07c2d1417201c426e04dc97419cbd16a4e790320"} Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.575706 4756 generic.go:334] "Generic (PLEG): container finished" podID="094a83f2-0234-4c62-ba77-e622e631e9fc" containerID="2bc74545f81e8737bfa9763cd903208a3883860a7001887942e0d12b17c80e58" exitCode=0 Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.575847 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hjcpm" event={"ID":"094a83f2-0234-4c62-ba77-e622e631e9fc","Type":"ContainerDied","Data":"2bc74545f81e8737bfa9763cd903208a3883860a7001887942e0d12b17c80e58"} Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.575895 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hjcpm" event={"ID":"094a83f2-0234-4c62-ba77-e622e631e9fc","Type":"ContainerStarted","Data":"697fd8810accfc1b755f40f5264609a913bbcfeaace8d448564ae40915e86a68"} Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.579183 4756 generic.go:334] "Generic (PLEG): container finished" podID="a2916419-e636-4f43-9b71-e80307c8f0ef" 
containerID="474c8aa084bae744c6d033f6a20578971b2945bbd7cfb52a6daba29565a1e337" exitCode=0 Sep 30 19:36:24 crc kubenswrapper[4756]: I0930 19:36:24.580839 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmzd7" event={"ID":"a2916419-e636-4f43-9b71-e80307c8f0ef","Type":"ContainerDied","Data":"474c8aa084bae744c6d033f6a20578971b2945bbd7cfb52a6daba29565a1e337"} Sep 30 19:36:25 crc kubenswrapper[4756]: I0930 19:36:25.590035 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmzd7" event={"ID":"a2916419-e636-4f43-9b71-e80307c8f0ef","Type":"ContainerStarted","Data":"8bdb6920ba90dc7d691abb874b89ef6ee54f2ef5ead82447276b7f590c0eea43"} Sep 30 19:36:25 crc kubenswrapper[4756]: I0930 19:36:25.605949 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zmzd7" podStartSLOduration=2.982086458 podStartE2EDuration="5.605934152s" podCreationTimestamp="2025-09-30 19:36:20 +0000 UTC" firstStartedPulling="2025-09-30 19:36:22.549041694 +0000 UTC m=+312.169975171" lastFinishedPulling="2025-09-30 19:36:25.172889388 +0000 UTC m=+314.793822865" observedRunningTime="2025-09-30 19:36:25.604106911 +0000 UTC m=+315.225040408" watchObservedRunningTime="2025-09-30 19:36:25.605934152 +0000 UTC m=+315.226867629" Sep 30 19:36:26 crc kubenswrapper[4756]: I0930 19:36:26.597111 4756 generic.go:334] "Generic (PLEG): container finished" podID="37aac716-15ee-430e-a9ab-bf0bcd10a1c0" containerID="31f4eda29199b8df67c3c1fb62d36d488062a6185cb731a421acc422190f90f5" exitCode=0 Sep 30 19:36:26 crc kubenswrapper[4756]: I0930 19:36:26.597169 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jlx7" event={"ID":"37aac716-15ee-430e-a9ab-bf0bcd10a1c0","Type":"ContainerDied","Data":"31f4eda29199b8df67c3c1fb62d36d488062a6185cb731a421acc422190f90f5"} Sep 30 19:36:26 crc kubenswrapper[4756]: I0930 19:36:26.599971 4756 generic.go:334] "Generic (PLEG): container finished" podID="094a83f2-0234-4c62-ba77-e622e631e9fc" containerID="95f8413b2f67cec9ec0d3794a54ee2b89183dda6a7d00074af510d540e6b59d2" exitCode=0 Sep 30 19:36:26 crc kubenswrapper[4756]: I0930 19:36:26.600032 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hjcpm" event={"ID":"094a83f2-0234-4c62-ba77-e622e631e9fc","Type":"ContainerDied","Data":"95f8413b2f67cec9ec0d3794a54ee2b89183dda6a7d00074af510d540e6b59d2"} Sep 30 19:36:27 crc kubenswrapper[4756]: I0930 19:36:27.614436 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jlx7" event={"ID":"37aac716-15ee-430e-a9ab-bf0bcd10a1c0","Type":"ContainerStarted","Data":"8f6dd7e2fd0e2781da37299f01e7f654fbd757ddc2cc79b0825df06be14f3754"} Sep 30 19:36:27 crc kubenswrapper[4756]: I0930 19:36:27.617694 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hjcpm" event={"ID":"094a83f2-0234-4c62-ba77-e622e631e9fc","Type":"ContainerStarted","Data":"d6c914280a5672feb331c7b0baab7ccb2afc53bb191b5105d5510b31b1278191"} Sep 30 19:36:27 crc kubenswrapper[4756]: I0930 19:36:27.630783 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5jlx7" podStartSLOduration=2.225015283 podStartE2EDuration="4.630765623s" podCreationTimestamp="2025-09-30 19:36:23 +0000 UTC" firstStartedPulling="2025-09-30 19:36:24.560584979 +0000 UTC m=+314.181518456" 
lastFinishedPulling="2025-09-30 19:36:26.966335319 +0000 UTC m=+316.587268796" observedRunningTime="2025-09-30 19:36:27.629697244 +0000 UTC m=+317.250630721" watchObservedRunningTime="2025-09-30 19:36:27.630765623 +0000 UTC m=+317.251699100" Sep 30 19:36:27 crc kubenswrapper[4756]: I0930 19:36:27.651284 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hjcpm" podStartSLOduration=2.170987315 podStartE2EDuration="4.651260297s" podCreationTimestamp="2025-09-30 19:36:23 +0000 UTC" firstStartedPulling="2025-09-30 19:36:24.577571777 +0000 UTC m=+314.198505254" lastFinishedPulling="2025-09-30 19:36:27.057844719 +0000 UTC m=+316.678778236" observedRunningTime="2025-09-30 19:36:27.646570378 +0000 UTC m=+317.267503865" watchObservedRunningTime="2025-09-30 19:36:27.651260297 +0000 UTC m=+317.272193794" Sep 30 19:36:30 crc kubenswrapper[4756]: I0930 19:36:30.971111 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:30 crc kubenswrapper[4756]: I0930 19:36:30.971484 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.035010 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.169260 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.169384 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.284338 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.677453 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzt5t" Sep 30 19:36:31 crc kubenswrapper[4756]: I0930 19:36:31.687863 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zmzd7" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.392077 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.392415 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.445038 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.582250 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.582296 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.629348 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:36:33 crc 
kubenswrapper[4756]: I0930 19:36:33.700557 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5jlx7" Sep 30 19:36:33 crc kubenswrapper[4756]: I0930 19:36:33.702586 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hjcpm" Sep 30 19:37:11 crc kubenswrapper[4756]: I0930 19:37:11.538454 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:37:11 crc kubenswrapper[4756]: I0930 19:37:11.539098 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:37:41 crc kubenswrapper[4756]: I0930 19:37:41.538696 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:37:41 crc kubenswrapper[4756]: I0930 19:37:41.539231 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:38:11 crc kubenswrapper[4756]: I0930 19:38:11.538764 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:38:11 crc kubenswrapper[4756]: I0930 19:38:11.539611 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:38:11 crc kubenswrapper[4756]: I0930 19:38:11.539678 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:38:11 crc kubenswrapper[4756]: I0930 19:38:11.540743 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:38:11 crc kubenswrapper[4756]: I0930 19:38:11.540848 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" 
containerID="cri-o://6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4" gracePeriod=600 Sep 30 19:38:12 crc kubenswrapper[4756]: I0930 19:38:12.280260 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4" exitCode=0 Sep 30 19:38:12 crc kubenswrapper[4756]: I0930 19:38:12.280433 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4"} Sep 30 19:38:12 crc kubenswrapper[4756]: I0930 19:38:12.280843 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b"} Sep 30 19:38:12 crc kubenswrapper[4756]: I0930 19:38:12.280876 4756 scope.go:117] "RemoveContainer" containerID="ba18abf718fde52cadd61c25b05097873901a4dbbe7631fd7ce64133627b9473" Sep 30 19:38:20 crc kubenswrapper[4756]: I0930 19:38:20.849051 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qsgb6"] Sep 30 19:38:20 crc kubenswrapper[4756]: I0930 19:38:20.853526 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:20 crc kubenswrapper[4756]: I0930 19:38:20.867169 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qsgb6"] Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.005647 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-tls\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.005905 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-certificates\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.005925 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-trusted-ca\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.005949 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/30027813-3e7e-48d1-ac71-8607a7d0bd99-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 
19:38:21.006050 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpsmf\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-kube-api-access-fpsmf\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.006068 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-bound-sa-token\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.006096 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.006124 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30027813-3e7e-48d1-ac71-8607a7d0bd99-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.026812 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107245 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpsmf\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-kube-api-access-fpsmf\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107298 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-bound-sa-token\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30027813-3e7e-48d1-ac71-8607a7d0bd99-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107380 4756 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-tls\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107411 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-certificates\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107430 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-trusted-ca\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107456 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/30027813-3e7e-48d1-ac71-8607a7d0bd99-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.107952 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/30027813-3e7e-48d1-ac71-8607a7d0bd99-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.109376 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-certificates\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.110039 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30027813-3e7e-48d1-ac71-8607a7d0bd99-trusted-ca\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.113757 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30027813-3e7e-48d1-ac71-8607a7d0bd99-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.116207 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-registry-tls\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.121742 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-bound-sa-token\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.126153 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpsmf\" (UniqueName: \"kubernetes.io/projected/30027813-3e7e-48d1-ac71-8607a7d0bd99-kube-api-access-fpsmf\") pod \"image-registry-66df7c8f76-qsgb6\" (UID: \"30027813-3e7e-48d1-ac71-8607a7d0bd99\") " pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.204888 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:21 crc kubenswrapper[4756]: I0930 19:38:21.610477 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qsgb6"] Sep 30 19:38:22 crc kubenswrapper[4756]: I0930 19:38:22.342114 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" event={"ID":"30027813-3e7e-48d1-ac71-8607a7d0bd99","Type":"ContainerStarted","Data":"4a2d4e5b24cfa9f50f31810db80b522a25d9fa0fe5d51209d9485ce95d0722f9"} Sep 30 19:38:22 crc kubenswrapper[4756]: I0930 19:38:22.342495 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:22 crc kubenswrapper[4756]: I0930 19:38:22.342522 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" event={"ID":"30027813-3e7e-48d1-ac71-8607a7d0bd99","Type":"ContainerStarted","Data":"4a63762c3a39fb936b12e635c28e9cf6770c78d45c512b4fd105ac749f04eb30"} Sep 30 19:38:22 crc kubenswrapper[4756]: I0930 19:38:22.368776 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" podStartSLOduration=2.368744923 podStartE2EDuration="2.368744923s" podCreationTimestamp="2025-09-30 19:38:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:38:22.366773051 +0000 UTC m=+431.987706548" watchObservedRunningTime="2025-09-30 19:38:22.368744923 +0000 UTC m=+431.989678440" Sep 30 19:38:41 crc kubenswrapper[4756]: I0930 19:38:41.216006 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qsgb6" Sep 30 19:38:41 crc kubenswrapper[4756]: I0930 19:38:41.306743 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"] Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.346196 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" podUID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" containerName="registry" containerID="cri-o://c801d9a345fada93537e26288362d5759104548aa511a1ec3f3e39ef03d601e3" gracePeriod=30 Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.616340 4756 generic.go:334] 
"Generic (PLEG): container finished" podID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" containerID="c801d9a345fada93537e26288362d5759104548aa511a1ec3f3e39ef03d601e3" exitCode=0 Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.616642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" event={"ID":"ce702cef-b28d-4fcd-99b0-9961a980d3e6","Type":"ContainerDied","Data":"c801d9a345fada93537e26288362d5759104548aa511a1ec3f3e39ef03d601e3"} Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.780460 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883528 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883632 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883754 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883777 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf9s9\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883817 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883838 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883871 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" (UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.883891 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets\") pod \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\" 
(UID: \"ce702cef-b28d-4fcd-99b0-9961a980d3e6\") " Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.886185 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.886788 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.888860 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.889206 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.890091 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.890216 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9" (OuterVolumeSpecName: "kube-api-access-jf9s9") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "kube-api-access-jf9s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.896790 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.901747 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ce702cef-b28d-4fcd-99b0-9961a980d3e6" (UID: "ce702cef-b28d-4fcd-99b0-9961a980d3e6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984884 4756 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984917 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce702cef-b28d-4fcd-99b0-9961a980d3e6-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984928 4756 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984937 4756 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ce702cef-b28d-4fcd-99b0-9961a980d3e6-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984947 4756 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984956 4756 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ce702cef-b28d-4fcd-99b0-9961a980d3e6-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:06 crc kubenswrapper[4756]: I0930 19:39:06.984967 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf9s9\" (UniqueName: \"kubernetes.io/projected/ce702cef-b28d-4fcd-99b0-9961a980d3e6-kube-api-access-jf9s9\") on node \"crc\" DevicePath \"\"" Sep 30 19:39:07 crc kubenswrapper[4756]: I0930 19:39:07.632034 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" event={"ID":"ce702cef-b28d-4fcd-99b0-9961a980d3e6","Type":"ContainerDied","Data":"50809457fec328570a1c0b7cff90f0c35b928a2f3bf0ad8f14b9f4f0e60b0260"} Sep 30 19:39:07 crc kubenswrapper[4756]: I0930 19:39:07.632451 4756 scope.go:117] "RemoveContainer" containerID="c801d9a345fada93537e26288362d5759104548aa511a1ec3f3e39ef03d601e3" Sep 30 19:39:07 crc kubenswrapper[4756]: I0930 19:39:07.632123 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wpxq6" Sep 30 19:39:07 crc kubenswrapper[4756]: I0930 19:39:07.658730 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"] Sep 30 19:39:07 crc kubenswrapper[4756]: I0930 19:39:07.660993 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wpxq6"] Sep 30 19:39:09 crc kubenswrapper[4756]: I0930 19:39:09.123212 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" path="/var/lib/kubelet/pods/ce702cef-b28d-4fcd-99b0-9961a980d3e6/volumes" Sep 30 19:40:11 crc kubenswrapper[4756]: I0930 19:40:11.538574 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:40:11 crc kubenswrapper[4756]: I0930 19:40:11.539155 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:40:41 crc kubenswrapper[4756]: I0930 19:40:41.538635 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:40:41 crc kubenswrapper[4756]: I0930 19:40:41.539147 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:41:11 crc kubenswrapper[4756]: I0930 19:41:11.539063 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:41:11 crc kubenswrapper[4756]: I0930 19:41:11.542223 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:41:11 crc kubenswrapper[4756]: I0930 19:41:11.542525 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:41:11 crc kubenswrapper[4756]: I0930 19:41:11.543989 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Sep 30 19:41:11 crc kubenswrapper[4756]: I0930 19:41:11.544322 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b" gracePeriod=600 Sep 30 19:41:12 crc kubenswrapper[4756]: I0930 19:41:12.429792 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b" exitCode=0 Sep 30 19:41:12 crc kubenswrapper[4756]: I0930 19:41:12.429902 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b"} Sep 30 19:41:12 crc kubenswrapper[4756]: I0930 19:41:12.430226 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d"} Sep 30 19:41:12 crc kubenswrapper[4756]: I0930 19:41:12.430251 4756 scope.go:117] "RemoveContainer" containerID="6ba8081515eeb18121e909838c5752036b5d1f4bd297c66dad27b619402c76b4" Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.893658 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kmxk8"] Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.894929 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-controller" containerID="cri-o://990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" gracePeriod=30 Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.894996 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="nbdb" containerID="cri-o://f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" gracePeriod=30 Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.895093 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="northd" containerID="cri-o://261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" gracePeriod=30 Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.895166 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" gracePeriod=30 Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.895227 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-node" containerID="cri-o://fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" gracePeriod=30 Sep 30 
Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.895283 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-acl-logging" containerID="cri-o://1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" gracePeriod=30
Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.896037 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="sbdb" containerID="cri-o://64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" gracePeriod=30
Sep 30 19:42:48 crc kubenswrapper[4756]: I0930 19:42:48.964218 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" containerID="cri-o://1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" gracePeriod=30
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.048610 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/2.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.049203 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/1.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.049358 4756 generic.go:334] "Generic (PLEG): container finished" podID="104562fb-2a2d-4291-963c-7a95062ec13a" containerID="470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9" exitCode=2
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.049488 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerDied","Data":"470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9"}
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.049641 4756 scope.go:117] "RemoveContainer" containerID="3d7e48a55049d09339652ebc1700f595dcbba5c2434a42ccd4a1d644c388f996"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.050096 4756 scope.go:117] "RemoveContainer" containerID="470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9"
Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.050350 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-l9mw5_openshift-multus(104562fb-2a2d-4291-963c-7a95062ec13a)\"" pod="openshift-multus/multus-l9mw5" podUID="104562fb-2a2d-4291-963c-7a95062ec13a"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.054005 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.061407 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-acl-logging/0.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062023 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-controller/0.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062642 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" exitCode=0
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062665 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" exitCode=0
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062672 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" exitCode=143
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062679 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" exitCode=143
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062699 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"}
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062720 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"}
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062730 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"}
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.062738 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"}
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.220168 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.222604 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-acl-logging/0.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.223123 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-controller/0.log"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.223569 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8"
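The "back-off 20s" in the kube-multus CrashLoopBackOff error above reflects kubelet's documented restart back-off: 10s after the first failure, doubling on each subsequent restart, capped at 5 minutes (and reset after a container runs cleanly for long enough). A sketch of that arithmetic only; the real implementation uses kubelet's internal flowcontrol back-off, not this exact form.

    package main

    import (
    	"fmt"
    	"time"
    )

    // restartBackoff models the documented CrashLoopBackOff delay:
    // 10s base, doubling per restart, capped at 5m.
    func restartBackoff(restarts int) time.Duration {
    	d := 10 * time.Second
    	for i := 0; i < restarts; i++ {
    		d *= 2
    		if d >= 5*time.Minute {
    			return 5 * time.Minute
    		}
    	}
    	return d
    }

    func main() {
    	for r := 0; r <= 5; r++ {
    		fmt.Printf("restart %d -> back-off %v\n", r, restartBackoff(r))
    	}
    	// restart 1 -> back-off 20s, matching the kube-multus entry above.
    }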
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278494 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pjcrs"] Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278690 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kubecfg-setup" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278702 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kubecfg-setup" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278713 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" containerName="registry" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278719 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" containerName="registry" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278725 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-acl-logging" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278731 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-acl-logging" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278741 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278746 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278755 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278761 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278769 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278775 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278783 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="northd" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278790 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="northd" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278801 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278809 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278818 4756 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278826 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278834 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278841 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278850 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="nbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278857 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="nbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278870 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="sbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278877 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="sbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.278888 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-node" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278895 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-node" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278981 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278989 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="sbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.278998 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279004 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce702cef-b28d-4fcd-99b0-9961a980d3e6" containerName="registry" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279013 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-acl-logging" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279020 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279028 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-node" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279034 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="northd" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279042 4756 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279047 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovn-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279056 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="nbdb" Sep 30 19:42:49 crc kubenswrapper[4756]: E0930 19:42:49.279147 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279153 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279233 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.279241 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96acc31-a519-46fc-94d8-f19522e77391" containerName="ovnkube-controller" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.281278 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289678 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289745 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289743 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289775 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289789 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289811 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289843 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289894 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289925 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289954 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289976 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290006 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290037 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290066 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bwg2\" (UniqueName: \"kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290091 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" 
(UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290118 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290145 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290166 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290197 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290219 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290238 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290255 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet\") pod \"d96acc31-a519-46fc-94d8-f19522e77391\" (UID: \"d96acc31-a519-46fc-94d8-f19522e77391\") " Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290480 4756 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290496 4756 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.289896 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket" (OuterVolumeSpecName: "log-socket") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290347 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290550 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.290623 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291133 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291501 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291592 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291611 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log" (OuterVolumeSpecName: "node-log") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291632 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291656 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291677 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash" (OuterVolumeSpecName: "host-slash") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291771 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291909 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.291937 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.292246 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.296424 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2" (OuterVolumeSpecName: "kube-api-access-5bwg2") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "kube-api-access-5bwg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.296439 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.310361 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "d96acc31-a519-46fc-94d8-f19522e77391" (UID: "d96acc31-a519-46fc-94d8-f19522e77391"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391676 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/84379bd4-5109-4940-a9af-99e39ffb6bce-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391730 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-log-socket\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391755 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-ovn\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391822 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391843 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-netns\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391873 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-script-lib\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391900 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-kubelet\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391922 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-netd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391947 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-systemd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.391998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-etc-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392026 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392464 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-config\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392564 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgbtc\" (UniqueName: \"kubernetes.io/projected/84379bd4-5109-4940-a9af-99e39ffb6bce-kube-api-access-zgbtc\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392594 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-node-log\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: 
I0930 19:42:49.392618 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-env-overrides\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392643 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-var-lib-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392668 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-bin\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392694 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-systemd-units\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.392716 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-slash\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393089 4756 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393106 4756 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393119 4756 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393132 4756 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393146 4756 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393158 4756 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-ovnkube-script-lib\") on node \"crc\" DevicePath 
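The interleaved traffic above is kubelet's volume manager reconciling actual state toward desired state: every mount belonging to the deleted pod UID (d96acc31-...) gets an UnmountVolume/TearDown and ends as "Volume detached", while each volume desired by the replacement pod UID (84379bd4-...) gets VerifyControllerAttachedVolume and, below, MountVolume.SetUp. A compact Go sketch of one such reconcile pass; the function and map names are illustrative, not the reconciler's real types.

    package main

    import "fmt"

    // reconcile unmounts anything mounted but no longer desired, then
    // mounts anything desired but missing -- the shape of the
    // reconciler_common.go loop visible in this log.
    func reconcile(desired, actual map[string]bool) {
    	for vol := range actual {
    		if !desired[vol] {
    			fmt.Printf("UnmountVolume started for volume %q\n", vol)
    			delete(actual, vol)
    		}
    	}
    	for vol := range desired {
    		if !actual[vol] {
    			fmt.Printf("MountVolume started for volume %q\n", vol)
    			actual[vol] = true
    		}
    	}
    }

    func main() {
    	actual := map[string]bool{"d96acc31-.../etc-openvswitch": true}
    	desired := map[string]bool{"84379bd4-.../etc-openvswitch": true}
    	reconcile(desired, actual)
    }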
\"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393170 4756 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393181 4756 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393192 4756 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d96acc31-a519-46fc-94d8-f19522e77391-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393204 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bwg2\" (UniqueName: \"kubernetes.io/projected/d96acc31-a519-46fc-94d8-f19522e77391-kube-api-access-5bwg2\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393216 4756 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393229 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d96acc31-a519-46fc-94d8-f19522e77391-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393241 4756 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393253 4756 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393264 4756 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393276 4756 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393289 4756 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.393300 4756 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d96acc31-a519-46fc-94d8-f19522e77391-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494047 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-script-lib\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494093 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-kubelet\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494109 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-netd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494128 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494147 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-systemd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494167 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-etc-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494196 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494216 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-config\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494233 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgbtc\" (UniqueName: \"kubernetes.io/projected/84379bd4-5109-4940-a9af-99e39ffb6bce-kube-api-access-zgbtc\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494240 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494281 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-node-log\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494257 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-node-log\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494308 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494294 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-netd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-env-overrides\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494347 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-systemd\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494308 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-etc-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494443 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-var-lib-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494477 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-bin\") pod \"ovnkube-node-pjcrs\" (UID: 
\"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494509 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-systemd-units\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494525 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-slash\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494540 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-var-lib-openvswitch\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494551 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/84379bd4-5109-4940-a9af-99e39ffb6bce-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494579 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-systemd-units\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494587 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-log-socket\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494608 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-ovn\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494612 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-cni-bin\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494620 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-slash\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494632 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494652 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494679 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-run-ovn\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494691 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-netns\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494719 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-run-netns\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.494685 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-log-socket\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.495125 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-config\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.495247 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-env-overrides\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.495302 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/84379bd4-5109-4940-a9af-99e39ffb6bce-host-kubelet\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.495334 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/84379bd4-5109-4940-a9af-99e39ffb6bce-ovnkube-script-lib\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.497492 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/84379bd4-5109-4940-a9af-99e39ffb6bce-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.508472 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgbtc\" (UniqueName: \"kubernetes.io/projected/84379bd4-5109-4940-a9af-99e39ffb6bce-kube-api-access-zgbtc\") pod \"ovnkube-node-pjcrs\" (UID: \"84379bd4-5109-4940-a9af-99e39ffb6bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: I0930 19:42:49.596775 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs"
Sep 30 19:42:49 crc kubenswrapper[4756]: W0930 19:42:49.618992 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84379bd4_5109_4940_a9af_99e39ffb6bce.slice/crio-6041c75508cf4d8764f4104d4f34cc4a51e9fe944c045568a4a50fc47f57f88c WatchSource:0}: Error finding container 6041c75508cf4d8764f4104d4f34cc4a51e9fe944c045568a4a50fc47f57f88c: Status 404 returned error can't find the container with id 6041c75508cf4d8764f4104d4f34cc4a51e9fe944c045568a4a50fc47f57f88c
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.071365 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovnkube-controller/3.log"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.074500 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-acl-logging/0.log"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075061 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kmxk8_d96acc31-a519-46fc-94d8-f19522e77391/ovn-controller/0.log"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075505 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" exitCode=0
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075527 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" exitCode=0
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075535 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" exitCode=0
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075542 4756 generic.go:334] "Generic (PLEG): container finished" podID="d96acc31-a519-46fc-94d8-f19522e77391" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" exitCode=0
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075592 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075617 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075629 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075638 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075647 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" event={"ID":"d96acc31-a519-46fc-94d8-f19522e77391","Type":"ContainerDied","Data":"491f8988059f7e68820ea749015f1d214f2fc64998eea802c4d2db97a3b30cc6"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075649 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kmxk8" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.075664 4756 scope.go:117] "RemoveContainer" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.078662 4756 generic.go:334] "Generic (PLEG): container finished" podID="84379bd4-5109-4940-a9af-99e39ffb6bce" containerID="87a9e5eec536e8d0872b6448a95f6815d25aa6af62c70867d606f6cc9f99605f" exitCode=0 Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.078716 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerDied","Data":"87a9e5eec536e8d0872b6448a95f6815d25aa6af62c70867d606f6cc9f99605f"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.078743 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"6041c75508cf4d8764f4104d4f34cc4a51e9fe944c045568a4a50fc47f57f88c"} Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.087021 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/2.log" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.134498 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kmxk8"] Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.138342 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kmxk8"] Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.145932 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.164512 4756 scope.go:117] 
"RemoveContainer" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.186039 4756 scope.go:117] "RemoveContainer" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.203572 4756 scope.go:117] "RemoveContainer" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.215501 4756 scope.go:117] "RemoveContainer" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.228837 4756 scope.go:117] "RemoveContainer" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.245426 4756 scope.go:117] "RemoveContainer" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.260162 4756 scope.go:117] "RemoveContainer" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.274903 4756 scope.go:117] "RemoveContainer" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.311624 4756 scope.go:117] "RemoveContainer" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.312415 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": container with ID starting with 1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75 not found: ID does not exist" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.312449 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} err="failed to get container status \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": rpc error: code = NotFound desc = could not find container \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": container with ID starting with 1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.312475 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.312690 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": container with ID starting with 802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2 not found: ID does not exist" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.312713 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} err="failed to get container status 
\"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": rpc error: code = NotFound desc = could not find container \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": container with ID starting with 802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.312733 4756 scope.go:117] "RemoveContainer" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.313327 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": container with ID starting with 64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99 not found: ID does not exist" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.313357 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} err="failed to get container status \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": rpc error: code = NotFound desc = could not find container \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": container with ID starting with 64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.313373 4756 scope.go:117] "RemoveContainer" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.313677 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": container with ID starting with f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2 not found: ID does not exist" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.313720 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} err="failed to get container status \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": rpc error: code = NotFound desc = could not find container \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": container with ID starting with f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.313748 4756 scope.go:117] "RemoveContainer" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.314945 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": container with ID starting with 261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad not found: ID does not exist" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.315002 4756 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} err="failed to get container status \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": rpc error: code = NotFound desc = could not find container \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": container with ID starting with 261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.315023 4756 scope.go:117] "RemoveContainer" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.315363 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": container with ID starting with c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe not found: ID does not exist" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.315386 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"} err="failed to get container status \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": rpc error: code = NotFound desc = could not find container \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": container with ID starting with c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.315419 4756 scope.go:117] "RemoveContainer" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.315923 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": container with ID starting with fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93 not found: ID does not exist" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.315959 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"} err="failed to get container status \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": rpc error: code = NotFound desc = could not find container \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": container with ID starting with fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.316011 4756 scope.go:117] "RemoveContainer" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.316330 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": container with ID starting with 1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564 not found: ID does not exist" 
containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.316356 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"} err="failed to get container status \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": rpc error: code = NotFound desc = could not find container \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": container with ID starting with 1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.316370 4756 scope.go:117] "RemoveContainer" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.316645 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.316666 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"} err="failed to get container status \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": rpc error: code = NotFound desc = could not find container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.316679 4756 scope.go:117] "RemoveContainer" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" Sep 30 19:42:50 crc kubenswrapper[4756]: E0930 19:42:50.316975 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": container with ID starting with 76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af not found: ID does not exist" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317001 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"} err="failed to get container status \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": rpc error: code = NotFound desc = could not find container \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": container with ID starting with 76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317016 4756 scope.go:117] "RemoveContainer" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317321 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} err="failed to get container status \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": rpc error: code = NotFound desc = could not find container \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": container with ID starting with 1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317341 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317538 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} err="failed to get container status \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": rpc error: code = NotFound desc = could not find container \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": container with ID starting with 802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317561 4756 scope.go:117] "RemoveContainer" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317900 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} err="failed to get container status \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": rpc error: code = NotFound desc = could not find container \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": container with ID starting with 64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.317923 4756 scope.go:117] "RemoveContainer" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.318135 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} err="failed to get container status \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": rpc error: code = NotFound desc = could not find container \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": container with ID starting with f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.318158 4756 scope.go:117] "RemoveContainer" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.318388 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} err="failed to get container status \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": rpc error: code = NotFound desc = could not find container \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": container with ID starting with 261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad not found: ID does not exist" Sep 
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.318628 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"} err="failed to get container status \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": rpc error: code = NotFound desc = could not find container \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": container with ID starting with c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.318651 4756 scope.go:117] "RemoveContainer" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319004 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"} err="failed to get container status \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": rpc error: code = NotFound desc = could not find container \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": container with ID starting with fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319025 4756 scope.go:117] "RemoveContainer" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319340 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"} err="failed to get container status \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": rpc error: code = NotFound desc = could not find container \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": container with ID starting with 1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319360 4756 scope.go:117] "RemoveContainer" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319665 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"} err="failed to get container status \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": rpc error: code = NotFound desc = could not find container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319685 4756 scope.go:117] "RemoveContainer" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319928 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"} err="failed to get container status \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": rpc error: code = NotFound desc = could not find container \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": container with ID starting with 76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.319946 4756 scope.go:117] "RemoveContainer" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.320550 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} err="failed to get container status \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": rpc error: code = NotFound desc = could not find container \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": container with ID starting with 1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.320609 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.320854 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} err="failed to get container status \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": rpc error: code = NotFound desc = could not find container \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": container with ID starting with 802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.320877 4756 scope.go:117] "RemoveContainer" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321120 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} err="failed to get container status \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": rpc error: code = NotFound desc = could not find container \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": container with ID starting with 64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321147 4756 scope.go:117] "RemoveContainer" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321366 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} err="failed to get container status \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": rpc error: code = NotFound desc = could not find container \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": container with ID starting with f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321386 4756 scope.go:117] "RemoveContainer" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321560 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} err="failed to get container status \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": rpc error: code = NotFound desc = could not find container \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": container with ID starting with 261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321580 4756 scope.go:117] "RemoveContainer" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321736 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"} err="failed to get container status \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": rpc error: code = NotFound desc = could not find container \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": container with ID starting with c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321756 4756 scope.go:117] "RemoveContainer" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321932 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"} err="failed to get container status \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": rpc error: code = NotFound desc = could not find container \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": container with ID starting with fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.321952 4756 scope.go:117] "RemoveContainer" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322098 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"} err="failed to get container status \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": rpc error: code = NotFound desc = could not find container \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": container with ID starting with 1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564 not found: ID does not exist"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322121 4756 scope.go:117] "RemoveContainer" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"
Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322268 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"} err="failed to get container status \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": rpc error: code = NotFound desc = could not find container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist"
container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322289 4756 scope.go:117] "RemoveContainer" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322450 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"} err="failed to get container status \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": rpc error: code = NotFound desc = could not find container \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": container with ID starting with 76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322469 4756 scope.go:117] "RemoveContainer" containerID="1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322744 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75"} err="failed to get container status \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": rpc error: code = NotFound desc = could not find container \"1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75\": container with ID starting with 1fac0efd11ba04c69202d48c30a6196f3d0be1b1457916c5d64b3a8d1d79aa75 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.322783 4756 scope.go:117] "RemoveContainer" containerID="802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.323898 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2"} err="failed to get container status \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": rpc error: code = NotFound desc = could not find container \"802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2\": container with ID starting with 802780f678d2e964061eb22cc909146073c026124ad07406cf1e046c73c442a2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.323917 4756 scope.go:117] "RemoveContainer" containerID="64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324187 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99"} err="failed to get container status \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": rpc error: code = NotFound desc = could not find container \"64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99\": container with ID starting with 64404e34c6a077df9cc9e9f9273dbe188c6feda3bc1186a83e05a43e1fa2ea99 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324207 4756 scope.go:117] "RemoveContainer" containerID="f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324388 4756 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2"} err="failed to get container status \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": rpc error: code = NotFound desc = could not find container \"f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2\": container with ID starting with f20230d520af06912183a05d0b9bcabb8581bf772fe34e037fa8d1a157c4a1b2 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324418 4756 scope.go:117] "RemoveContainer" containerID="261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324583 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad"} err="failed to get container status \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": rpc error: code = NotFound desc = could not find container \"261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad\": container with ID starting with 261b104308c48bbe4631db85f56d63cefeb0bc6b7112076b0bf3f4fb6b67caad not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324603 4756 scope.go:117] "RemoveContainer" containerID="c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324824 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe"} err="failed to get container status \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": rpc error: code = NotFound desc = could not find container \"c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe\": container with ID starting with c5badf6335e4358fa0078ae48d7d50a908bf754fdb57c786e445ccae31933ebe not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.324845 4756 scope.go:117] "RemoveContainer" containerID="fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325007 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93"} err="failed to get container status \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": rpc error: code = NotFound desc = could not find container \"fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93\": container with ID starting with fe054d1844c58433aebfe8f4e500e17558f1b4afb5423002d60b50f210052c93 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325023 4756 scope.go:117] "RemoveContainer" containerID="1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325157 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564"} err="failed to get container status \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": rpc error: code = NotFound desc = could not find container \"1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564\": container with ID starting with 
1fa8babb1f5d1213dec99c75be1d5f37d09c2257a31babd51ec297621fc7d564 not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325169 4756 scope.go:117] "RemoveContainer" containerID="990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325303 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba"} err="failed to get container status \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": rpc error: code = NotFound desc = could not find container \"990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba\": container with ID starting with 990926364c8d973164f3da73740b7d31c8caf87f0fb691539f7a62520b822fba not found: ID does not exist" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325314 4756 scope.go:117] "RemoveContainer" containerID="76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af" Sep 30 19:42:50 crc kubenswrapper[4756]: I0930 19:42:50.325458 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af"} err="failed to get container status \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": rpc error: code = NotFound desc = could not find container \"76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af\": container with ID starting with 76bb398eefb2b0a9ef06b44c2ea182deefd51e4daf42b17946b911822d6eb0af not found: ID does not exist" Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.099920 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"e000efb8607c52a9b68ac9511a31b0347d1e3e1a10a5499050921727457fc9f4"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.101625 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"3a268697033d14392574e1b136aeeb374c5ac1816bdc580634d8a31d3c565509"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.101759 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"4dfbf1bb5f1bd847398d3417e1ee626362216a44523a9be775bd1bd125d14877"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.101832 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"f676d2d5dc785aaead73275252590f6c1827249c81a66b6c146cfef995106fb6"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.101908 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"8db659a082c0e288cae3e9d1a7cb22c56b5541b097e762bae84a7ae1428fb7df"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.101991 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" 
event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"7a570d444a40cea2a5e87fc088bdfd24f85218804572e9b5c4935dac29cea3ec"} Sep 30 19:42:51 crc kubenswrapper[4756]: I0930 19:42:51.126587 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d96acc31-a519-46fc-94d8-f19522e77391" path="/var/lib/kubelet/pods/d96acc31-a519-46fc-94d8-f19522e77391/volumes" Sep 30 19:42:53 crc kubenswrapper[4756]: I0930 19:42:53.114048 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"47cfe65ce4ad5b8dd9567cd6c5a9452dc7b3d5d37d16fad47ef7de8fb0de84d3"} Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.135761 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" event={"ID":"84379bd4-5109-4940-a9af-99e39ffb6bce","Type":"ContainerStarted","Data":"6f0c053adaf97539c2df258d92a02655c9d1aab5d77b05d1f546966123cd0937"} Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.136326 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.136339 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.136349 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.160370 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" podStartSLOduration=7.160354062 podStartE2EDuration="7.160354062s" podCreationTimestamp="2025-09-30 19:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:42:56.159722075 +0000 UTC m=+705.780655552" watchObservedRunningTime="2025-09-30 19:42:56.160354062 +0000 UTC m=+705.781287539" Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.163575 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:56 crc kubenswrapper[4756]: I0930 19:42:56.177117 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.110844 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-vbkvb"] Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.112561 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.114578 4756 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-ghtx2" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.114856 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.114965 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.118750 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.123951 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-vbkvb"] Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.199321 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c24h\" (UniqueName: \"kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.199521 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.199744 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.300800 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.300866 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.300961 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c24h\" (UniqueName: \"kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.301984 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " 
pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.302523 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.328850 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c24h\" (UniqueName: \"kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h\") pod \"crc-storage-crc-vbkvb\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: I0930 19:42:57.429455 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: E0930 19:42:57.455252 4756 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(1d1e9a9e87016b55ef91b9eb577369250836624886d4ed89fabe278f5026e192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:42:57 crc kubenswrapper[4756]: E0930 19:42:57.455501 4756 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(1d1e9a9e87016b55ef91b9eb577369250836624886d4ed89fabe278f5026e192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: E0930 19:42:57.455647 4756 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(1d1e9a9e87016b55ef91b9eb577369250836624886d4ed89fabe278f5026e192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:57 crc kubenswrapper[4756]: E0930 19:42:57.455820 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(1d1e9a9e87016b55ef91b9eb577369250836624886d4ed89fabe278f5026e192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-vbkvb" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" Sep 30 19:42:58 crc kubenswrapper[4756]: I0930 19:42:58.150026 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:58 crc kubenswrapper[4756]: I0930 19:42:58.151022 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:58 crc kubenswrapper[4756]: E0930 19:42:58.187694 4756 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(3f6691515615c64ada63f3a5c43cac043c905e8faabd197ebca7698ceefe58bf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:42:58 crc kubenswrapper[4756]: E0930 19:42:58.187938 4756 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(3f6691515615c64ada63f3a5c43cac043c905e8faabd197ebca7698ceefe58bf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:58 crc kubenswrapper[4756]: E0930 19:42:58.188098 4756 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(3f6691515615c64ada63f3a5c43cac043c905e8faabd197ebca7698ceefe58bf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:42:58 crc kubenswrapper[4756]: E0930 19:42:58.188364 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(3f6691515615c64ada63f3a5c43cac043c905e8faabd197ebca7698ceefe58bf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-vbkvb" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" Sep 30 19:43:02 crc kubenswrapper[4756]: I0930 19:43:02.115694 4756 scope.go:117] "RemoveContainer" containerID="470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9" Sep 30 19:43:02 crc kubenswrapper[4756]: E0930 19:43:02.116360 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-l9mw5_openshift-multus(104562fb-2a2d-4291-963c-7a95062ec13a)\"" pod="openshift-multus/multus-l9mw5" podUID="104562fb-2a2d-4291-963c-7a95062ec13a" Sep 30 19:43:11 crc kubenswrapper[4756]: I0930 19:43:11.115346 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:11 crc kubenswrapper[4756]: I0930 19:43:11.118011 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:11 crc kubenswrapper[4756]: E0930 19:43:11.147424 4756 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(88e547901fd457567d104d426ed829d00b5d6985b687b333bade8d77fe363714): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:43:11 crc kubenswrapper[4756]: E0930 19:43:11.147514 4756 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(88e547901fd457567d104d426ed829d00b5d6985b687b333bade8d77fe363714): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:11 crc kubenswrapper[4756]: E0930 19:43:11.147544 4756 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(88e547901fd457567d104d426ed829d00b5d6985b687b333bade8d77fe363714): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:11 crc kubenswrapper[4756]: E0930 19:43:11.147599 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-vbkvb_crc-storage(d4b82604-c6b1-497d-a983-84d6543257a3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-vbkvb_crc-storage_d4b82604-c6b1-497d-a983-84d6543257a3_0(88e547901fd457567d104d426ed829d00b5d6985b687b333bade8d77fe363714): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-vbkvb" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" Sep 30 19:43:11 crc kubenswrapper[4756]: I0930 19:43:11.539044 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:43:11 crc kubenswrapper[4756]: I0930 19:43:11.539088 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:43:16 crc kubenswrapper[4756]: I0930 19:43:16.116566 4756 scope.go:117] "RemoveContainer" containerID="470501388b1e38afd5e6b82ca923d83c2a5392340aa8161d03f7487c4d5945c9" Sep 30 19:43:16 crc kubenswrapper[4756]: I0930 19:43:16.277106 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l9mw5_104562fb-2a2d-4291-963c-7a95062ec13a/kube-multus/2.log" Sep 30 19:43:16 crc kubenswrapper[4756]: I0930 19:43:16.277546 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l9mw5" event={"ID":"104562fb-2a2d-4291-963c-7a95062ec13a","Type":"ContainerStarted","Data":"ebd21af8a05ac8ed9ace952130f2e58ec705525c044129878bb086ba0a2d31ee"} Sep 30 19:43:19 crc kubenswrapper[4756]: I0930 19:43:19.625892 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcrs" Sep 30 19:43:24 crc kubenswrapper[4756]: I0930 19:43:24.116079 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:24 crc kubenswrapper[4756]: I0930 19:43:24.117747 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:24 crc kubenswrapper[4756]: I0930 19:43:24.365134 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-vbkvb"] Sep 30 19:43:24 crc kubenswrapper[4756]: W0930 19:43:24.380597 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4b82604_c6b1_497d_a983_84d6543257a3.slice/crio-7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414 WatchSource:0}: Error finding container 7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414: Status 404 returned error can't find the container with id 7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414 Sep 30 19:43:24 crc kubenswrapper[4756]: I0930 19:43:24.383450 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 19:43:25 crc kubenswrapper[4756]: I0930 19:43:25.337670 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-vbkvb" event={"ID":"d4b82604-c6b1-497d-a983-84d6543257a3","Type":"ContainerStarted","Data":"7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414"} Sep 30 19:43:26 crc kubenswrapper[4756]: I0930 19:43:26.346799 4756 generic.go:334] "Generic (PLEG): container finished" podID="d4b82604-c6b1-497d-a983-84d6543257a3" containerID="e6e95b482daf14ba551431879790adb7bea9d18283bf491d25bbe0c6bb308353" exitCode=0 Sep 30 19:43:26 crc kubenswrapper[4756]: I0930 19:43:26.346883 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-vbkvb" event={"ID":"d4b82604-c6b1-497d-a983-84d6543257a3","Type":"ContainerDied","Data":"e6e95b482daf14ba551431879790adb7bea9d18283bf491d25bbe0c6bb308353"} Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.684535 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.744056 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage\") pod \"d4b82604-c6b1-497d-a983-84d6543257a3\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.767633 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d4b82604-c6b1-497d-a983-84d6543257a3" (UID: "d4b82604-c6b1-497d-a983-84d6543257a3"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.844895 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c24h\" (UniqueName: \"kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h\") pod \"d4b82604-c6b1-497d-a983-84d6543257a3\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.845106 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt\") pod \"d4b82604-c6b1-497d-a983-84d6543257a3\" (UID: \"d4b82604-c6b1-497d-a983-84d6543257a3\") " Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.845359 4756 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d4b82604-c6b1-497d-a983-84d6543257a3-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.845445 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d4b82604-c6b1-497d-a983-84d6543257a3" (UID: "d4b82604-c6b1-497d-a983-84d6543257a3"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.850834 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h" (OuterVolumeSpecName: "kube-api-access-7c24h") pod "d4b82604-c6b1-497d-a983-84d6543257a3" (UID: "d4b82604-c6b1-497d-a983-84d6543257a3"). InnerVolumeSpecName "kube-api-access-7c24h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.946688 4756 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d4b82604-c6b1-497d-a983-84d6543257a3-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:27 crc kubenswrapper[4756]: I0930 19:43:27.946741 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c24h\" (UniqueName: \"kubernetes.io/projected/d4b82604-c6b1-497d-a983-84d6543257a3-kube-api-access-7c24h\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:28 crc kubenswrapper[4756]: I0930 19:43:28.364021 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-vbkvb" event={"ID":"d4b82604-c6b1-497d-a983-84d6543257a3","Type":"ContainerDied","Data":"7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414"} Sep 30 19:43:28 crc kubenswrapper[4756]: I0930 19:43:28.364061 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ed9f19b3ddde87d071b120714bf140893de64233bd0521150e2ed560466d414" Sep 30 19:43:28 crc kubenswrapper[4756]: I0930 19:43:28.364101 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-vbkvb" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.380910 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86"] Sep 30 19:43:36 crc kubenswrapper[4756]: E0930 19:43:36.381597 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" containerName="storage" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.381609 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" containerName="storage" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.381708 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" containerName="storage" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.382344 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.384726 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.401657 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86"] Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.564948 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.565010 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.565131 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mpk8\" (UniqueName: \"kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.665838 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mpk8\" (UniqueName: \"kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.665953 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.665977 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.666840 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.666845 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:36 crc kubenswrapper[4756]: I0930 19:43:36.708347 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mpk8\" (UniqueName: \"kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.005124 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.249086 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86"] Sep 30 19:43:37 crc kubenswrapper[4756]: W0930 19:43:37.258954 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8aaa37a_07d0_4d40_a4b4_5204a6357700.slice/crio-87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d WatchSource:0}: Error finding container 87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d: Status 404 returned error can't find the container with id 87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.430164 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerStarted","Data":"b2767b896e0c03717684e5221b6c879cb04b76cd00aeb42e6ab7cce0576919cc"} Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.430858 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerStarted","Data":"87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d"} Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.792101 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.792388 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" containerID="cri-o://eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf" gracePeriod=30 Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.886749 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:43:37 crc kubenswrapper[4756]: I0930 19:43:37.887065 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerName="route-controller-manager" containerID="cri-o://9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf" gracePeriod=30 Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.165345 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.260352 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318004 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6snr\" (UniqueName: \"kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr\") pod \"d03605a9-6716-409b-a9b2-f54bc2f8e467\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318058 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca\") pod \"d03605a9-6716-409b-a9b2-f54bc2f8e467\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318118 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config\") pod \"d03605a9-6716-409b-a9b2-f54bc2f8e467\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318143 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert\") pod \"d03605a9-6716-409b-a9b2-f54bc2f8e467\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318234 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles\") pod \"d03605a9-6716-409b-a9b2-f54bc2f8e467\" (UID: \"d03605a9-6716-409b-a9b2-f54bc2f8e467\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.318955 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca" (OuterVolumeSpecName: "client-ca") pod "d03605a9-6716-409b-a9b2-f54bc2f8e467" (UID: "d03605a9-6716-409b-a9b2-f54bc2f8e467"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.319017 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d03605a9-6716-409b-a9b2-f54bc2f8e467" (UID: "d03605a9-6716-409b-a9b2-f54bc2f8e467"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.319068 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config" (OuterVolumeSpecName: "config") pod "d03605a9-6716-409b-a9b2-f54bc2f8e467" (UID: "d03605a9-6716-409b-a9b2-f54bc2f8e467"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.324558 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d03605a9-6716-409b-a9b2-f54bc2f8e467" (UID: "d03605a9-6716-409b-a9b2-f54bc2f8e467"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.324683 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr" (OuterVolumeSpecName: "kube-api-access-g6snr") pod "d03605a9-6716-409b-a9b2-f54bc2f8e467" (UID: "d03605a9-6716-409b-a9b2-f54bc2f8e467"). InnerVolumeSpecName "kube-api-access-g6snr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.419421 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert\") pod \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.419548 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzt7m\" (UniqueName: \"kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m\") pod \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.419655 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config\") pod \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.419804 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca\") pod \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\" (UID: \"4bdd776c-4f43-42e4-ae36-da0ba80eb291\") " Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420369 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420424 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d03605a9-6716-409b-a9b2-f54bc2f8e467-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420454 4756 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420481 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6snr\" (UniqueName: \"kubernetes.io/projected/d03605a9-6716-409b-a9b2-f54bc2f8e467-kube-api-access-g6snr\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420505 4756 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d03605a9-6716-409b-a9b2-f54bc2f8e467-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420805 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca" (OuterVolumeSpecName: "client-ca") pod "4bdd776c-4f43-42e4-ae36-da0ba80eb291" (UID: 
"4bdd776c-4f43-42e4-ae36-da0ba80eb291"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.420892 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config" (OuterVolumeSpecName: "config") pod "4bdd776c-4f43-42e4-ae36-da0ba80eb291" (UID: "4bdd776c-4f43-42e4-ae36-da0ba80eb291"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.423221 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4bdd776c-4f43-42e4-ae36-da0ba80eb291" (UID: "4bdd776c-4f43-42e4-ae36-da0ba80eb291"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.427092 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m" (OuterVolumeSpecName: "kube-api-access-fzt7m") pod "4bdd776c-4f43-42e4-ae36-da0ba80eb291" (UID: "4bdd776c-4f43-42e4-ae36-da0ba80eb291"). InnerVolumeSpecName "kube-api-access-fzt7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.440020 4756 generic.go:334] "Generic (PLEG): container finished" podID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerID="9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf" exitCode=0 Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.440142 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.440142 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" event={"ID":"4bdd776c-4f43-42e4-ae36-da0ba80eb291","Type":"ContainerDied","Data":"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf"} Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.440216 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm" event={"ID":"4bdd776c-4f43-42e4-ae36-da0ba80eb291","Type":"ContainerDied","Data":"33db7ba8438a0b31347f3a43620e31c853e48cced06ae5603e0232047e477658"} Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.440248 4756 scope.go:117] "RemoveContainer" containerID="9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.444084 4756 generic.go:334] "Generic (PLEG): container finished" podID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerID="eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf" exitCode=0 Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.444184 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.444190 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" event={"ID":"d03605a9-6716-409b-a9b2-f54bc2f8e467","Type":"ContainerDied","Data":"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf"} Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.444232 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mstk5" event={"ID":"d03605a9-6716-409b-a9b2-f54bc2f8e467","Type":"ContainerDied","Data":"c71ac6e48ffae355b795592542d7ac87394adfeef20350c215192d9ce275f06f"} Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.446335 4756 generic.go:334] "Generic (PLEG): container finished" podID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerID="b2767b896e0c03717684e5221b6c879cb04b76cd00aeb42e6ab7cce0576919cc" exitCode=0 Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.446371 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerDied","Data":"b2767b896e0c03717684e5221b6c879cb04b76cd00aeb42e6ab7cce0576919cc"} Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.471780 4756 scope.go:117] "RemoveContainer" containerID="9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf" Sep 30 19:43:38 crc kubenswrapper[4756]: E0930 19:43:38.472877 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf\": container with ID starting with 9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf not found: ID does not exist" containerID="9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.472928 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf"} err="failed to get container status \"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf\": rpc error: code = NotFound desc = could not find container \"9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf\": container with ID starting with 9fb2b2fb2f166dbd1ce3017013ff43165e9143f946d6637299a2c353cd13f5bf not found: ID does not exist" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.472950 4756 scope.go:117] "RemoveContainer" containerID="eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.503612 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.511281 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mstk5"] Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.515691 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.519013 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zxkfm"] Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.523217 4756 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.523280 4756 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bdd776c-4f43-42e4-ae36-da0ba80eb291-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.523308 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzt7m\" (UniqueName: \"kubernetes.io/projected/4bdd776c-4f43-42e4-ae36-da0ba80eb291-kube-api-access-fzt7m\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.523335 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bdd776c-4f43-42e4-ae36-da0ba80eb291-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.525091 4756 scope.go:117] "RemoveContainer" containerID="eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf" Sep 30 19:43:38 crc kubenswrapper[4756]: E0930 19:43:38.526147 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf\": container with ID starting with eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf not found: ID does not exist" containerID="eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf" Sep 30 19:43:38 crc kubenswrapper[4756]: I0930 19:43:38.526357 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf"} err="failed to get container status \"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf\": rpc error: code = NotFound desc = could not find container \"eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf\": container with ID starting with eba17d45863ccf87f8d339590dc59b3e347f2a64340f8b95d4be055561200fcf not found: ID does not exist" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.124466 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" path="/var/lib/kubelet/pods/4bdd776c-4f43-42e4-ae36-da0ba80eb291/volumes" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.125768 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" path="/var/lib/kubelet/pods/d03605a9-6716-409b-a9b2-f54bc2f8e467/volumes" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.212185 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924"] Sep 30 19:43:39 crc kubenswrapper[4756]: E0930 19:43:39.212741 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerName="route-controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.212781 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerName="route-controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: E0930 19:43:39.212806 
4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.212822 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.213013 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d03605a9-6716-409b-a9b2-f54bc2f8e467" containerName="controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.213040 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bdd776c-4f43-42e4-ae36-da0ba80eb291" containerName="route-controller-manager" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.213863 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.217344 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.217843 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.220419 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.221068 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.221102 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.221609 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c76c55f57-fkxhk"] Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.222517 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.222957 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.225248 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.225649 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.228028 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.228448 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.228749 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.228947 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924"] Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.229011 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235101 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbzgs\" (UniqueName: \"kubernetes.io/projected/1c0240ba-0849-4a19-b0e2-619eeb82d742-kube-api-access-jbzgs\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235166 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-client-ca\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235214 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0240ba-0849-4a19-b0e2-619eeb82d742-serving-cert\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235256 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-config\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235273 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-668ck\" (UniqueName: \"kubernetes.io/projected/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-kube-api-access-668ck\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " 
pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235311 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-client-ca\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235326 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-proxy-ca-bundles\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235348 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-config\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235375 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-serving-cert\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.235779 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c76c55f57-fkxhk"] Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.239709 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337028 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbzgs\" (UniqueName: \"kubernetes.io/projected/1c0240ba-0849-4a19-b0e2-619eeb82d742-kube-api-access-jbzgs\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337102 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-client-ca\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337145 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0240ba-0849-4a19-b0e2-619eeb82d742-serving-cert\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 
19:43:39.337182 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-config\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337209 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-668ck\" (UniqueName: \"kubernetes.io/projected/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-kube-api-access-668ck\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337239 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-client-ca\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337258 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-proxy-ca-bundles\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337283 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-config\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.337318 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-serving-cert\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.339159 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-client-ca\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.339328 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-config\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.340289 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-client-ca\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.340500 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-config\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.341021 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1c0240ba-0849-4a19-b0e2-619eeb82d742-proxy-ca-bundles\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.344844 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0240ba-0849-4a19-b0e2-619eeb82d742-serving-cert\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.345375 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-serving-cert\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.356594 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbzgs\" (UniqueName: \"kubernetes.io/projected/1c0240ba-0849-4a19-b0e2-619eeb82d742-kube-api-access-jbzgs\") pod \"controller-manager-5c76c55f57-fkxhk\" (UID: \"1c0240ba-0849-4a19-b0e2-619eeb82d742\") " pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.358194 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-668ck\" (UniqueName: \"kubernetes.io/projected/eab36e37-f51d-45ef-b987-ea75d1f7b0a6-kube-api-access-668ck\") pod \"route-controller-manager-899bf5c5f-ks924\" (UID: \"eab36e37-f51d-45ef-b987-ea75d1f7b0a6\") " pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.601749 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.602073 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.880740 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924"] Sep 30 19:43:39 crc kubenswrapper[4756]: I0930 19:43:39.940914 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c76c55f57-fkxhk"] Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.471444 4756 generic.go:334] "Generic (PLEG): container finished" podID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerID="28c02766cb8454ca2bb73acc6e7af0feacee9d43aaeedf4f97ea2b31071443cc" exitCode=0 Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.472278 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerDied","Data":"28c02766cb8454ca2bb73acc6e7af0feacee9d43aaeedf4f97ea2b31071443cc"} Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.475507 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" event={"ID":"1c0240ba-0849-4a19-b0e2-619eeb82d742","Type":"ContainerStarted","Data":"071e7dfe26c6e117168dbcfc580d261ea74eb04903aa706dd63d59fda7aa90db"} Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.475536 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" event={"ID":"1c0240ba-0849-4a19-b0e2-619eeb82d742","Type":"ContainerStarted","Data":"f66236f024e02f516016c58b99e184364157b61e46ea578c80dd054081c5b787"} Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.475946 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.482269 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" event={"ID":"eab36e37-f51d-45ef-b987-ea75d1f7b0a6","Type":"ContainerStarted","Data":"86e8f49083aa704fa60217bd1daa41b359f060f98a70c3e73c6b099e0e45bd03"} Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.482330 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" event={"ID":"eab36e37-f51d-45ef-b987-ea75d1f7b0a6","Type":"ContainerStarted","Data":"21efb212660aa690ca75e1bc30d405cd503474c10537ccd6d338010ff942c574"} Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.485014 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.487540 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.499833 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" Sep 30 19:43:40 crc kubenswrapper[4756]: I0930 19:43:40.556427 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c76c55f57-fkxhk" 
podStartSLOduration=3.556383506 podStartE2EDuration="3.556383506s" podCreationTimestamp="2025-09-30 19:43:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:43:40.548273915 +0000 UTC m=+750.169207402" watchObservedRunningTime="2025-09-30 19:43:40.556383506 +0000 UTC m=+750.177316983" Sep 30 19:43:41 crc kubenswrapper[4756]: I0930 19:43:41.491354 4756 generic.go:334] "Generic (PLEG): container finished" podID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerID="26af75e202aa94abc733e2b99e17f110ea48a128bfefd39f03c9bf189fd9d266" exitCode=0 Sep 30 19:43:41 crc kubenswrapper[4756]: I0930 19:43:41.491545 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerDied","Data":"26af75e202aa94abc733e2b99e17f110ea48a128bfefd39f03c9bf189fd9d266"} Sep 30 19:43:41 crc kubenswrapper[4756]: I0930 19:43:41.520932 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-899bf5c5f-ks924" podStartSLOduration=4.520897527 podStartE2EDuration="4.520897527s" podCreationTimestamp="2025-09-30 19:43:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:43:40.578739119 +0000 UTC m=+750.199672606" watchObservedRunningTime="2025-09-30 19:43:41.520897527 +0000 UTC m=+751.141831064" Sep 30 19:43:41 crc kubenswrapper[4756]: I0930 19:43:41.538384 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:43:41 crc kubenswrapper[4756]: I0930 19:43:41.538567 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.867328 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.992850 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle\") pod \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.993088 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mpk8\" (UniqueName: \"kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8\") pod \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.993249 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util\") pod \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\" (UID: \"c8aaa37a-07d0-4d40-a4b4-5204a6357700\") " Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.993639 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle" (OuterVolumeSpecName: "bundle") pod "c8aaa37a-07d0-4d40-a4b4-5204a6357700" (UID: "c8aaa37a-07d0-4d40-a4b4-5204a6357700"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:43:42 crc kubenswrapper[4756]: I0930 19:43:42.993908 4756 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.000872 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8" (OuterVolumeSpecName: "kube-api-access-6mpk8") pod "c8aaa37a-07d0-4d40-a4b4-5204a6357700" (UID: "c8aaa37a-07d0-4d40-a4b4-5204a6357700"). InnerVolumeSpecName "kube-api-access-6mpk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.004487 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util" (OuterVolumeSpecName: "util") pod "c8aaa37a-07d0-4d40-a4b4-5204a6357700" (UID: "c8aaa37a-07d0-4d40-a4b4-5204a6357700"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.094757 4756 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8aaa37a-07d0-4d40-a4b4-5204a6357700-util\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.094793 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mpk8\" (UniqueName: \"kubernetes.io/projected/c8aaa37a-07d0-4d40-a4b4-5204a6357700-kube-api-access-6mpk8\") on node \"crc\" DevicePath \"\"" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.510970 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" event={"ID":"c8aaa37a-07d0-4d40-a4b4-5204a6357700","Type":"ContainerDied","Data":"87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d"} Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.511012 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87c9877105d2fc2eface2ad980328bf198ca4d42e1c71323e1ba5e441d1bba0d" Sep 30 19:43:43 crc kubenswrapper[4756]: I0930 19:43:43.511053 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86" Sep 30 19:43:45 crc kubenswrapper[4756]: I0930 19:43:45.154021 4756 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.925906 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2"] Sep 30 19:43:47 crc kubenswrapper[4756]: E0930 19:43:47.926438 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="util" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.926455 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="util" Sep 30 19:43:47 crc kubenswrapper[4756]: E0930 19:43:47.926465 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="pull" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.926473 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="pull" Sep 30 19:43:47 crc kubenswrapper[4756]: E0930 19:43:47.926490 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="extract" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.926497 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="extract" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.926617 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8aaa37a-07d0-4d40-a4b4-5204a6357700" containerName="extract" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.927047 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:47 crc kubenswrapper[4756]: W0930 19:43:47.928331 4756 reflector.go:561] object-"openshift-nmstate"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Sep 30 19:43:47 crc kubenswrapper[4756]: E0930 19:43:47.928462 4756 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.928925 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-72nls" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.929181 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.949636 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2"] Sep 30 19:43:47 crc kubenswrapper[4756]: I0930 19:43:47.957733 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzmfj\" (UniqueName: \"kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj\") pod \"nmstate-operator-5d6f6cfd66-w8zn2\" (UID: \"2def6bf4-9b5a-4c15-9fdd-86918ae44098\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:48 crc kubenswrapper[4756]: I0930 19:43:48.058775 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzmfj\" (UniqueName: \"kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj\") pod \"nmstate-operator-5d6f6cfd66-w8zn2\" (UID: \"2def6bf4-9b5a-4c15-9fdd-86918ae44098\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:49 crc kubenswrapper[4756]: E0930 19:43:49.070208 4756 projected.go:288] Couldn't get configMap openshift-nmstate/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:43:49 crc kubenswrapper[4756]: E0930 19:43:49.070277 4756 projected.go:194] Error preparing data for projected volume kube-api-access-pzmfj for pod openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:43:49 crc kubenswrapper[4756]: E0930 19:43:49.070361 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj podName:2def6bf4-9b5a-4c15-9fdd-86918ae44098 nodeName:}" failed. No retries permitted until 2025-09-30 19:43:49.570334997 +0000 UTC m=+759.191268514 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-pzmfj" (UniqueName: "kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj") pod "nmstate-operator-5d6f6cfd66-w8zn2" (UID: "2def6bf4-9b5a-4c15-9fdd-86918ae44098") : failed to sync configmap cache: timed out waiting for the condition Sep 30 19:43:49 crc kubenswrapper[4756]: I0930 19:43:49.292159 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 19:43:49 crc kubenswrapper[4756]: I0930 19:43:49.584339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzmfj\" (UniqueName: \"kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj\") pod \"nmstate-operator-5d6f6cfd66-w8zn2\" (UID: \"2def6bf4-9b5a-4c15-9fdd-86918ae44098\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:49 crc kubenswrapper[4756]: I0930 19:43:49.593541 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzmfj\" (UniqueName: \"kubernetes.io/projected/2def6bf4-9b5a-4c15-9fdd-86918ae44098-kube-api-access-pzmfj\") pod \"nmstate-operator-5d6f6cfd66-w8zn2\" (UID: \"2def6bf4-9b5a-4c15-9fdd-86918ae44098\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:49 crc kubenswrapper[4756]: I0930 19:43:49.743650 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" Sep 30 19:43:50 crc kubenswrapper[4756]: I0930 19:43:50.209545 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2"] Sep 30 19:43:50 crc kubenswrapper[4756]: I0930 19:43:50.566794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" event={"ID":"2def6bf4-9b5a-4c15-9fdd-86918ae44098","Type":"ContainerStarted","Data":"e5ce056a444aeba7c617d65a99df116c8f972ef2e71e9575d6d99e243ea9e887"} Sep 30 19:43:52 crc kubenswrapper[4756]: I0930 19:43:52.579996 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" event={"ID":"2def6bf4-9b5a-4c15-9fdd-86918ae44098","Type":"ContainerStarted","Data":"b0bf37088318c34883d9ba94df45ffc92cea4c86ac70c2a74149e0a639ee6c45"} Sep 30 19:43:52 crc kubenswrapper[4756]: I0930 19:43:52.606217 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-w8zn2" podStartSLOduration=3.603991727 podStartE2EDuration="5.606184484s" podCreationTimestamp="2025-09-30 19:43:47 +0000 UTC" firstStartedPulling="2025-09-30 19:43:50.222481107 +0000 UTC m=+759.843414594" lastFinishedPulling="2025-09-30 19:43:52.224673864 +0000 UTC m=+761.845607351" observedRunningTime="2025-09-30 19:43:52.599308135 +0000 UTC m=+762.220241692" watchObservedRunningTime="2025-09-30 19:43:52.606184484 +0000 UTC m=+762.227118001" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.851381 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-pcnln"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.852807 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.857590 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-mbn7t" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.861162 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-pcnln"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.867321 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9swr2"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.867953 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.872236 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.879028 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqm72\" (UniqueName: \"kubernetes.io/projected/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-kube-api-access-vqm72\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.879077 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttn9h\" (UniqueName: \"kubernetes.io/projected/745caeb3-41a0-4698-9e0f-66e9b4f0f318-kube-api-access-ttn9h\") pod \"nmstate-metrics-58fcddf996-pcnln\" (UID: \"745caeb3-41a0-4698-9e0f-66e9b4f0f318\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.879116 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.880156 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-jgcq4"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.880850 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.890295 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9swr2"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.970345 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656"] Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.970976 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.972885 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.973144 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-krwq8" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980109 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttn9h\" (UniqueName: \"kubernetes.io/projected/745caeb3-41a0-4698-9e0f-66e9b4f0f318-kube-api-access-ttn9h\") pod \"nmstate-metrics-58fcddf996-pcnln\" (UID: \"745caeb3-41a0-4698-9e0f-66e9b4f0f318\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980151 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980174 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980202 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-dbus-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980218 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwvvs\" (UniqueName: \"kubernetes.io/projected/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-kube-api-access-mwvvs\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980251 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980273 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm2lr\" (UniqueName: \"kubernetes.io/projected/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-kube-api-access-lm2lr\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980295 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-nmstate-lock\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980312 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-ovs-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.980336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqm72\" (UniqueName: \"kubernetes.io/projected/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-kube-api-access-vqm72\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:56 crc kubenswrapper[4756]: E0930 19:43:56.980756 4756 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Sep 30 19:43:56 crc kubenswrapper[4756]: E0930 19:43:56.980791 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair podName:0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3 nodeName:}" failed. No retries permitted until 2025-09-30 19:43:57.480777704 +0000 UTC m=+767.101711181 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair") pod "nmstate-webhook-6d689559c5-9swr2" (UID: "0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3") : secret "openshift-nmstate-webhook" not found Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.982534 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 19:43:56 crc kubenswrapper[4756]: I0930 19:43:56.985713 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656"] Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.005059 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttn9h\" (UniqueName: \"kubernetes.io/projected/745caeb3-41a0-4698-9e0f-66e9b4f0f318-kube-api-access-ttn9h\") pod \"nmstate-metrics-58fcddf996-pcnln\" (UID: \"745caeb3-41a0-4698-9e0f-66e9b4f0f318\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.006101 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqm72\" (UniqueName: \"kubernetes.io/projected/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-kube-api-access-vqm72\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081745 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 
19:43:57.081810 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-dbus-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081829 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwvvs\" (UniqueName: \"kubernetes.io/projected/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-kube-api-access-mwvvs\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081868 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081892 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm2lr\" (UniqueName: \"kubernetes.io/projected/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-kube-api-access-lm2lr\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081916 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-nmstate-lock\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081935 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-ovs-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: E0930 19:43:57.081939 4756 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 30 19:43:57 crc kubenswrapper[4756]: E0930 19:43:57.081993 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert podName:7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb nodeName:}" failed. No retries permitted until 2025-09-30 19:43:57.58197842 +0000 UTC m=+767.202911897 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-bs656" (UID: "7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb") : secret "plugin-serving-cert" not found Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.081994 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-ovs-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.082097 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-nmstate-lock\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.082316 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-dbus-socket\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.082688 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.100511 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwvvs\" (UniqueName: \"kubernetes.io/projected/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-kube-api-access-mwvvs\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.100800 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm2lr\" (UniqueName: \"kubernetes.io/projected/c017e6b9-1931-4cac-9df1-4f8f03bc9c8c-kube-api-access-lm2lr\") pod \"nmstate-handler-jgcq4\" (UID: \"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c\") " pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.171845 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.175352 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7d5cbc5764-mt5lx"] Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.175949 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.192563 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7d5cbc5764-mt5lx"] Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.195983 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284272 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284589 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgqg8\" (UniqueName: \"kubernetes.io/projected/f99104b6-665b-4d14-bcf7-cc1ca09103e5-kube-api-access-jgqg8\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284610 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-service-ca\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284693 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-oauth-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284765 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-oauth-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284801 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-trusted-ca-bundle\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.284904 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.385826 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.385922 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.385956 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgqg8\" (UniqueName: \"kubernetes.io/projected/f99104b6-665b-4d14-bcf7-cc1ca09103e5-kube-api-access-jgqg8\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.385976 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-service-ca\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386018 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-oauth-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386073 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-oauth-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386110 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-trusted-ca-bundle\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386832 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-oauth-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386831 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.386941 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-service-ca\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.387380 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/f99104b6-665b-4d14-bcf7-cc1ca09103e5-trusted-ca-bundle\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.391779 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-serving-cert\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.391951 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f99104b6-665b-4d14-bcf7-cc1ca09103e5-console-oauth-config\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.406545 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgqg8\" (UniqueName: \"kubernetes.io/projected/f99104b6-665b-4d14-bcf7-cc1ca09103e5-kube-api-access-jgqg8\") pod \"console-7d5cbc5764-mt5lx\" (UID: \"f99104b6-665b-4d14-bcf7-cc1ca09103e5\") " pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.487300 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.489960 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9swr2\" (UID: \"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.536465 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.588336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.593069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bs656\" (UID: \"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.594484 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-pcnln"] Sep 30 19:43:57 crc kubenswrapper[4756]: W0930 19:43:57.601629 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod745caeb3_41a0_4698_9e0f_66e9b4f0f318.slice/crio-a11641f7dd590ed904f127a9be757bac02f381d698f47551da65d1b0ed0416cb WatchSource:0}: Error finding container a11641f7dd590ed904f127a9be757bac02f381d698f47551da65d1b0ed0416cb: Status 404 returned error can't find the container with id a11641f7dd590ed904f127a9be757bac02f381d698f47551da65d1b0ed0416cb Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.615749 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" event={"ID":"745caeb3-41a0-4698-9e0f-66e9b4f0f318","Type":"ContainerStarted","Data":"a11641f7dd590ed904f127a9be757bac02f381d698f47551da65d1b0ed0416cb"} Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.617463 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jgcq4" event={"ID":"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c","Type":"ContainerStarted","Data":"bf8912890a4ff7122d27248f2e6d691924847f850553deb718c9d38e5b514570"} Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.790420 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.883582 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" Sep 30 19:43:57 crc kubenswrapper[4756]: I0930 19:43:57.945754 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7d5cbc5764-mt5lx"] Sep 30 19:43:57 crc kubenswrapper[4756]: W0930 19:43:57.955069 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf99104b6_665b_4d14_bcf7_cc1ca09103e5.slice/crio-2b99287c0fc25e51aa80bff706fc74ea93f037bdd0f499c54a0c9169a56bfcf3 WatchSource:0}: Error finding container 2b99287c0fc25e51aa80bff706fc74ea93f037bdd0f499c54a0c9169a56bfcf3: Status 404 returned error can't find the container with id 2b99287c0fc25e51aa80bff706fc74ea93f037bdd0f499c54a0c9169a56bfcf3 Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.224927 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9swr2"] Sep 30 19:43:58 crc kubenswrapper[4756]: W0930 19:43:58.233631 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f0f1f9b_37a6_4af8_9a39_cdfd7656f1b3.slice/crio-77b12c494d629451542bd9890ff222d0f19b6f8cd00b46fa53cab9aaebdd7769 WatchSource:0}: Error finding container 77b12c494d629451542bd9890ff222d0f19b6f8cd00b46fa53cab9aaebdd7769: Status 404 returned error can't find the container with id 77b12c494d629451542bd9890ff222d0f19b6f8cd00b46fa53cab9aaebdd7769 Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.359664 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656"] Sep 30 19:43:58 crc kubenswrapper[4756]: W0930 19:43:58.362890 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7962f4e9_16e9_46dc_b9f7_e5a31b1e79eb.slice/crio-3414d0a180c5407d1bfbf1723b0b83ebebdd13ec8649a93b5f6acb713dae1647 WatchSource:0}: Error finding container 3414d0a180c5407d1bfbf1723b0b83ebebdd13ec8649a93b5f6acb713dae1647: Status 404 returned error can't find the container with id 3414d0a180c5407d1bfbf1723b0b83ebebdd13ec8649a93b5f6acb713dae1647 Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.626084 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7d5cbc5764-mt5lx" event={"ID":"f99104b6-665b-4d14-bcf7-cc1ca09103e5","Type":"ContainerStarted","Data":"d8e2ac69394640519d772c71995e4f45a8d32077b0dcdcede997de9eb5922566"} Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.626397 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7d5cbc5764-mt5lx" event={"ID":"f99104b6-665b-4d14-bcf7-cc1ca09103e5","Type":"ContainerStarted","Data":"2b99287c0fc25e51aa80bff706fc74ea93f037bdd0f499c54a0c9169a56bfcf3"} Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.627170 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" event={"ID":"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb","Type":"ContainerStarted","Data":"3414d0a180c5407d1bfbf1723b0b83ebebdd13ec8649a93b5f6acb713dae1647"} Sep 30 19:43:58 crc kubenswrapper[4756]: I0930 19:43:58.628699 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" event={"ID":"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3","Type":"ContainerStarted","Data":"77b12c494d629451542bd9890ff222d0f19b6f8cd00b46fa53cab9aaebdd7769"} Sep 30 19:43:58 crc 
kubenswrapper[4756]: I0930 19:43:58.647158 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7d5cbc5764-mt5lx" podStartSLOduration=1.6471404600000001 podStartE2EDuration="1.64714046s" podCreationTimestamp="2025-09-30 19:43:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:43:58.643861455 +0000 UTC m=+768.264794942" watchObservedRunningTime="2025-09-30 19:43:58.64714046 +0000 UTC m=+768.268073927" Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.670946 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" event={"ID":"7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb","Type":"ContainerStarted","Data":"28abe87e04735b48f2ef85f45338d0ca2d77439b713c31ee35bd6418c99e9e98"} Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.672330 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" event={"ID":"0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3","Type":"ContainerStarted","Data":"6c3c53b6cc57728cb0fa23525ed13ced78d80f8664a7dfd20e19c683d71c9441"} Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.672887 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.674252 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jgcq4" event={"ID":"c017e6b9-1931-4cac-9df1-4f8f03bc9c8c","Type":"ContainerStarted","Data":"097541bdcdce5df0555150c31b536c86409292d4b02dd9ddda96b45f52a1557a"} Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.674615 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.675997 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" event={"ID":"745caeb3-41a0-4698-9e0f-66e9b4f0f318","Type":"ContainerStarted","Data":"be8159111e652b49fddfdd43d2de0440efd38959b3687085e0d72197a5ba04fb"} Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.685217 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bs656" podStartSLOduration=2.553139981 podStartE2EDuration="4.685197062s" podCreationTimestamp="2025-09-30 19:43:56 +0000 UTC" firstStartedPulling="2025-09-30 19:43:58.365257356 +0000 UTC m=+767.986190833" lastFinishedPulling="2025-09-30 19:44:00.497314437 +0000 UTC m=+770.118247914" observedRunningTime="2025-09-30 19:44:00.684684309 +0000 UTC m=+770.305617796" watchObservedRunningTime="2025-09-30 19:44:00.685197062 +0000 UTC m=+770.306130549" Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 19:44:00.768327 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" podStartSLOduration=3.415984694 podStartE2EDuration="4.768307178s" podCreationTimestamp="2025-09-30 19:43:56 +0000 UTC" firstStartedPulling="2025-09-30 19:43:58.235351392 +0000 UTC m=+767.856284909" lastFinishedPulling="2025-09-30 19:43:59.587673916 +0000 UTC m=+769.208607393" observedRunningTime="2025-09-30 19:44:00.761664805 +0000 UTC m=+770.382598282" watchObservedRunningTime="2025-09-30 19:44:00.768307178 +0000 UTC m=+770.389240655" Sep 30 19:44:00 crc kubenswrapper[4756]: I0930 
19:44:00.790762 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-jgcq4" podStartSLOduration=2.453190498 podStartE2EDuration="4.790744003s" podCreationTimestamp="2025-09-30 19:43:56 +0000 UTC" firstStartedPulling="2025-09-30 19:43:57.248456988 +0000 UTC m=+766.869390465" lastFinishedPulling="2025-09-30 19:43:59.586010483 +0000 UTC m=+769.206943970" observedRunningTime="2025-09-30 19:44:00.79064643 +0000 UTC m=+770.411579907" watchObservedRunningTime="2025-09-30 19:44:00.790744003 +0000 UTC m=+770.411677480" Sep 30 19:44:02 crc kubenswrapper[4756]: I0930 19:44:02.690983 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" event={"ID":"745caeb3-41a0-4698-9e0f-66e9b4f0f318","Type":"ContainerStarted","Data":"b34c09d2ba18a798a705ed1e55283dfca7520744b224b3b1488d1223c1039d14"} Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.395821 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-pcnln" podStartSLOduration=3.894558793 podStartE2EDuration="8.395785102s" podCreationTimestamp="2025-09-30 19:43:56 +0000 UTC" firstStartedPulling="2025-09-30 19:43:57.604751472 +0000 UTC m=+767.225684949" lastFinishedPulling="2025-09-30 19:44:02.10597777 +0000 UTC m=+771.726911258" observedRunningTime="2025-09-30 19:44:02.718458419 +0000 UTC m=+772.339391896" watchObservedRunningTime="2025-09-30 19:44:04.395785102 +0000 UTC m=+774.016718619" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.398910 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.401112 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.411597 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.529305 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp4d4\" (UniqueName: \"kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.529758 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.529847 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.630632 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp4d4\" (UniqueName: \"kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.630726 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.630751 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.631267 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.631317 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.652331 4756 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vp4d4\" (UniqueName: \"kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4\") pod \"redhat-marketplace-pcxq9\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:04 crc kubenswrapper[4756]: I0930 19:44:04.738536 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:05 crc kubenswrapper[4756]: I0930 19:44:05.164792 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:05 crc kubenswrapper[4756]: I0930 19:44:05.717851 4756 generic.go:334] "Generic (PLEG): container finished" podID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerID="131d74801cd1512820c815726843fa573790898841201696e0b70986973adcf6" exitCode=0 Sep 30 19:44:05 crc kubenswrapper[4756]: I0930 19:44:05.717994 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerDied","Data":"131d74801cd1512820c815726843fa573790898841201696e0b70986973adcf6"} Sep 30 19:44:05 crc kubenswrapper[4756]: I0930 19:44:05.718260 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerStarted","Data":"630270ad50b322702e6a576e9eac8755f67da4907a1b51d53b3955b5016cc026"} Sep 30 19:44:06 crc kubenswrapper[4756]: I0930 19:44:06.729126 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerStarted","Data":"ac0e160267a39b9a50f717503e9bb9661e38e5f0de6ac9103eb6f4e61469486f"} Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.232568 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-jgcq4" Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.537135 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.537186 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.544121 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.739747 4756 generic.go:334] "Generic (PLEG): container finished" podID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerID="ac0e160267a39b9a50f717503e9bb9661e38e5f0de6ac9103eb6f4e61469486f" exitCode=0 Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.739823 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerDied","Data":"ac0e160267a39b9a50f717503e9bb9661e38e5f0de6ac9103eb6f4e61469486f"} Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.744665 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7d5cbc5764-mt5lx" Sep 30 19:44:07 crc kubenswrapper[4756]: I0930 19:44:07.835770 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:44:08 
crc kubenswrapper[4756]: I0930 19:44:08.752662 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerStarted","Data":"4876f38b69ed225153629b251e2aea9255bf39edabd7998d7e6c82287e99f717"} Sep 30 19:44:08 crc kubenswrapper[4756]: I0930 19:44:08.778727 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pcxq9" podStartSLOduration=2.129009171 podStartE2EDuration="4.778705829s" podCreationTimestamp="2025-09-30 19:44:04 +0000 UTC" firstStartedPulling="2025-09-30 19:44:05.721259807 +0000 UTC m=+775.342193314" lastFinishedPulling="2025-09-30 19:44:08.370956475 +0000 UTC m=+777.991889972" observedRunningTime="2025-09-30 19:44:08.776014119 +0000 UTC m=+778.396947636" watchObservedRunningTime="2025-09-30 19:44:08.778705829 +0000 UTC m=+778.399639316" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.409769 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.411307 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.415311 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.531897 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khqmb\" (UniqueName: \"kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.532170 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.532313 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.539146 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.539189 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.539222 4756 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.539638 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.539707 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d" gracePeriod=600 Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.633187 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khqmb\" (UniqueName: \"kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.633242 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.633286 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.633985 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.634188 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.652077 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khqmb\" (UniqueName: \"kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb\") pod \"community-operators-6jwnz\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.733083 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.782211 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d" exitCode=0 Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.782256 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d"} Sep 30 19:44:11 crc kubenswrapper[4756]: I0930 19:44:11.782287 4756 scope.go:117] "RemoveContainer" containerID="283db66c586f33bf62f42011fd591816e47d70e65049040b396eaac160f0797b" Sep 30 19:44:12 crc kubenswrapper[4756]: I0930 19:44:12.194025 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:12 crc kubenswrapper[4756]: W0930 19:44:12.203554 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod488a7ed1_06a9_4fce_9989_9d5c48c15799.slice/crio-a22e9daf810b057e2df8fb920d083dd3307e43483fad35590d56234be2b91d70 WatchSource:0}: Error finding container a22e9daf810b057e2df8fb920d083dd3307e43483fad35590d56234be2b91d70: Status 404 returned error can't find the container with id a22e9daf810b057e2df8fb920d083dd3307e43483fad35590d56234be2b91d70 Sep 30 19:44:12 crc kubenswrapper[4756]: I0930 19:44:12.794593 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2"} Sep 30 19:44:12 crc kubenswrapper[4756]: I0930 19:44:12.797512 4756 generic.go:334] "Generic (PLEG): container finished" podID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerID="821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d" exitCode=0 Sep 30 19:44:12 crc kubenswrapper[4756]: I0930 19:44:12.797600 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerDied","Data":"821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d"} Sep 30 19:44:12 crc kubenswrapper[4756]: I0930 19:44:12.797676 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerStarted","Data":"a22e9daf810b057e2df8fb920d083dd3307e43483fad35590d56234be2b91d70"} Sep 30 19:44:13 crc kubenswrapper[4756]: I0930 19:44:13.805327 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerStarted","Data":"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a"} Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 19:44:14.739642 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 19:44:14.739747 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 
19:44:14.785993 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 19:44:14.815853 4756 generic.go:334] "Generic (PLEG): container finished" podID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerID="5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a" exitCode=0 Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 19:44:14.815975 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerDied","Data":"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a"} Sep 30 19:44:14 crc kubenswrapper[4756]: I0930 19:44:14.859063 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:15 crc kubenswrapper[4756]: I0930 19:44:15.823792 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerStarted","Data":"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1"} Sep 30 19:44:15 crc kubenswrapper[4756]: I0930 19:44:15.844763 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6jwnz" podStartSLOduration=2.405826173 podStartE2EDuration="4.844736553s" podCreationTimestamp="2025-09-30 19:44:11 +0000 UTC" firstStartedPulling="2025-09-30 19:44:12.80071983 +0000 UTC m=+782.421653337" lastFinishedPulling="2025-09-30 19:44:15.23963024 +0000 UTC m=+784.860563717" observedRunningTime="2025-09-30 19:44:15.841104188 +0000 UTC m=+785.462037675" watchObservedRunningTime="2025-09-30 19:44:15.844736553 +0000 UTC m=+785.465670040" Sep 30 19:44:17 crc kubenswrapper[4756]: I0930 19:44:17.801948 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9swr2" Sep 30 19:44:18 crc kubenswrapper[4756]: I0930 19:44:18.384353 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:18 crc kubenswrapper[4756]: I0930 19:44:18.385208 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pcxq9" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="registry-server" containerID="cri-o://4876f38b69ed225153629b251e2aea9255bf39edabd7998d7e6c82287e99f717" gracePeriod=2 Sep 30 19:44:18 crc kubenswrapper[4756]: I0930 19:44:18.863914 4756 generic.go:334] "Generic (PLEG): container finished" podID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerID="4876f38b69ed225153629b251e2aea9255bf39edabd7998d7e6c82287e99f717" exitCode=0 Sep 30 19:44:18 crc kubenswrapper[4756]: I0930 19:44:18.863950 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerDied","Data":"4876f38b69ed225153629b251e2aea9255bf39edabd7998d7e6c82287e99f717"} Sep 30 19:44:18 crc kubenswrapper[4756]: I0930 19:44:18.937852 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.036614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp4d4\" (UniqueName: \"kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4\") pod \"bed43b38-7bb9-4219-9a72-4506a007cb14\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.036683 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities\") pod \"bed43b38-7bb9-4219-9a72-4506a007cb14\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.036712 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content\") pod \"bed43b38-7bb9-4219-9a72-4506a007cb14\" (UID: \"bed43b38-7bb9-4219-9a72-4506a007cb14\") " Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.037858 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities" (OuterVolumeSpecName: "utilities") pod "bed43b38-7bb9-4219-9a72-4506a007cb14" (UID: "bed43b38-7bb9-4219-9a72-4506a007cb14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.038805 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.048254 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bed43b38-7bb9-4219-9a72-4506a007cb14" (UID: "bed43b38-7bb9-4219-9a72-4506a007cb14"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.048852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4" (OuterVolumeSpecName: "kube-api-access-vp4d4") pod "bed43b38-7bb9-4219-9a72-4506a007cb14" (UID: "bed43b38-7bb9-4219-9a72-4506a007cb14"). InnerVolumeSpecName "kube-api-access-vp4d4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.140520 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp4d4\" (UniqueName: \"kubernetes.io/projected/bed43b38-7bb9-4219-9a72-4506a007cb14-kube-api-access-vp4d4\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.140548 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed43b38-7bb9-4219-9a72-4506a007cb14-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.873063 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcxq9" event={"ID":"bed43b38-7bb9-4219-9a72-4506a007cb14","Type":"ContainerDied","Data":"630270ad50b322702e6a576e9eac8755f67da4907a1b51d53b3955b5016cc026"} Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.873407 4756 scope.go:117] "RemoveContainer" containerID="4876f38b69ed225153629b251e2aea9255bf39edabd7998d7e6c82287e99f717" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.873213 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcxq9" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.898766 4756 scope.go:117] "RemoveContainer" containerID="ac0e160267a39b9a50f717503e9bb9661e38e5f0de6ac9103eb6f4e61469486f" Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.903076 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.911068 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcxq9"] Sep 30 19:44:19 crc kubenswrapper[4756]: I0930 19:44:19.923221 4756 scope.go:117] "RemoveContainer" containerID="131d74801cd1512820c815726843fa573790898841201696e0b70986973adcf6" Sep 30 19:44:21 crc kubenswrapper[4756]: I0930 19:44:21.129230 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" path="/var/lib/kubelet/pods/bed43b38-7bb9-4219-9a72-4506a007cb14/volumes" Sep 30 19:44:21 crc kubenswrapper[4756]: I0930 19:44:21.733816 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:21 crc kubenswrapper[4756]: I0930 19:44:21.734187 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:21 crc kubenswrapper[4756]: I0930 19:44:21.784754 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:21 crc kubenswrapper[4756]: I0930 19:44:21.947718 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:24 crc kubenswrapper[4756]: I0930 19:44:24.983786 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:24 crc kubenswrapper[4756]: I0930 19:44:24.984348 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6jwnz" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="registry-server" containerID="cri-o://0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1" 
gracePeriod=2 Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.591154 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.769257 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khqmb\" (UniqueName: \"kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb\") pod \"488a7ed1-06a9-4fce-9989-9d5c48c15799\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.769685 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities\") pod \"488a7ed1-06a9-4fce-9989-9d5c48c15799\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.769726 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content\") pod \"488a7ed1-06a9-4fce-9989-9d5c48c15799\" (UID: \"488a7ed1-06a9-4fce-9989-9d5c48c15799\") " Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.771266 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities" (OuterVolumeSpecName: "utilities") pod "488a7ed1-06a9-4fce-9989-9d5c48c15799" (UID: "488a7ed1-06a9-4fce-9989-9d5c48c15799"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.774217 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb" (OuterVolumeSpecName: "kube-api-access-khqmb") pod "488a7ed1-06a9-4fce-9989-9d5c48c15799" (UID: "488a7ed1-06a9-4fce-9989-9d5c48c15799"). InnerVolumeSpecName "kube-api-access-khqmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.825917 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "488a7ed1-06a9-4fce-9989-9d5c48c15799" (UID: "488a7ed1-06a9-4fce-9989-9d5c48c15799"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.871383 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khqmb\" (UniqueName: \"kubernetes.io/projected/488a7ed1-06a9-4fce-9989-9d5c48c15799-kube-api-access-khqmb\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.871619 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.871679 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a7ed1-06a9-4fce-9989-9d5c48c15799-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.911424 4756 generic.go:334] "Generic (PLEG): container finished" podID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerID="0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1" exitCode=0 Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.911471 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerDied","Data":"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1"} Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.911499 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwnz" event={"ID":"488a7ed1-06a9-4fce-9989-9d5c48c15799","Type":"ContainerDied","Data":"a22e9daf810b057e2df8fb920d083dd3307e43483fad35590d56234be2b91d70"} Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.911519 4756 scope.go:117] "RemoveContainer" containerID="0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.911641 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6jwnz" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.936334 4756 scope.go:117] "RemoveContainer" containerID="5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.951361 4756 scope.go:117] "RemoveContainer" containerID="821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.962944 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.968467 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6jwnz"] Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.981298 4756 scope.go:117] "RemoveContainer" containerID="0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1" Sep 30 19:44:25 crc kubenswrapper[4756]: E0930 19:44:25.981832 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1\": container with ID starting with 0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1 not found: ID does not exist" containerID="0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.981884 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1"} err="failed to get container status \"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1\": rpc error: code = NotFound desc = could not find container \"0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1\": container with ID starting with 0f38707db1fbf77ea6c42fd62befb9775b33293c7b4d48898c1261425fb79da1 not found: ID does not exist" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.981912 4756 scope.go:117] "RemoveContainer" containerID="5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a" Sep 30 19:44:25 crc kubenswrapper[4756]: E0930 19:44:25.982522 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a\": container with ID starting with 5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a not found: ID does not exist" containerID="5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.982558 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a"} err="failed to get container status \"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a\": rpc error: code = NotFound desc = could not find container \"5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a\": container with ID starting with 5885cb90ad8326beb4f17a61d321145125f1f8724bc48591cdb7a5ca17bb489a not found: ID does not exist" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.982584 4756 scope.go:117] "RemoveContainer" containerID="821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d" Sep 30 19:44:25 crc kubenswrapper[4756]: E0930 19:44:25.982889 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d\": container with ID starting with 821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d not found: ID does not exist" containerID="821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d" Sep 30 19:44:25 crc kubenswrapper[4756]: I0930 19:44:25.982926 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d"} err="failed to get container status \"821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d\": rpc error: code = NotFound desc = could not find container \"821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d\": container with ID starting with 821375530d9980363e28cb775af611f591b07fcdf3cf5962a111f6d74d535f4d not found: ID does not exist" Sep 30 19:44:27 crc kubenswrapper[4756]: I0930 19:44:27.122836 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" path="/var/lib/kubelet/pods/488a7ed1-06a9-4fce-9989-9d5c48c15799/volumes" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.829388 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm"] Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830434 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="extract-utilities" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830453 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="extract-utilities" Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830470 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830482 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830506 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="extract-content" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830517 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="extract-content" Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830533 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="extract-utilities" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830544 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="extract-utilities" Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830561 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="extract-content" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830571 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="extract-content" Sep 30 19:44:31 crc kubenswrapper[4756]: E0930 19:44:31.830589 4756 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830601 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830782 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="488a7ed1-06a9-4fce-9989-9d5c48c15799" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.830801 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bed43b38-7bb9-4219-9a72-4506a007cb14" containerName="registry-server" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.832022 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.834556 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.845886 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm"] Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.866862 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.866954 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blxb7\" (UniqueName: \"kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.867024 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.968531 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.968939 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blxb7\" (UniqueName: \"kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: 
\"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.969131 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.969288 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.969941 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:31 crc kubenswrapper[4756]: I0930 19:44:31.987128 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blxb7\" (UniqueName: \"kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.155173 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.641735 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm"] Sep 30 19:44:32 crc kubenswrapper[4756]: W0930 19:44:32.654714 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63c2f5ab_2343_4339_9fcf_f0fd80b703af.slice/crio-c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2 WatchSource:0}: Error finding container c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2: Status 404 returned error can't find the container with id c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2 Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.897477 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-h5cnz" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" containerName="console" containerID="cri-o://6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad" gracePeriod=15 Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.969205 4756 generic.go:334] "Generic (PLEG): container finished" podID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerID="716380861d408cd27062c636d7f198cf2c2f61f0e7944ef215d90738132655cf" exitCode=0 Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.969247 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" event={"ID":"63c2f5ab-2343-4339-9fcf-f0fd80b703af","Type":"ContainerDied","Data":"716380861d408cd27062c636d7f198cf2c2f61f0e7944ef215d90738132655cf"} Sep 30 19:44:32 crc kubenswrapper[4756]: I0930 19:44:32.969278 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" event={"ID":"63c2f5ab-2343-4339-9fcf-f0fd80b703af","Type":"ContainerStarted","Data":"c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2"} Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.323229 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h5cnz_37240768-1c94-44c3-9772-d4671f7e35bf/console/0.log" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.323548 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.393865 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8h85\" (UniqueName: \"kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.393941 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.393987 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.394015 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.394059 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.394126 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.394187 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle\") pod \"37240768-1c94-44c3-9772-d4671f7e35bf\" (UID: \"37240768-1c94-44c3-9772-d4671f7e35bf\") " Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.395175 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.395248 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca" (OuterVolumeSpecName: "service-ca") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.395255 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.395278 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config" (OuterVolumeSpecName: "console-config") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.398902 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.404636 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85" (OuterVolumeSpecName: "kube-api-access-m8h85") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "kube-api-access-m8h85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.405579 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "37240768-1c94-44c3-9772-d4671f7e35bf" (UID: "37240768-1c94-44c3-9772-d4671f7e35bf"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.494908 4756 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.494952 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8h85\" (UniqueName: \"kubernetes.io/projected/37240768-1c94-44c3-9772-d4671f7e35bf-kube-api-access-m8h85\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.494968 4756 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.494980 4756 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.494992 4756 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.495002 4756 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37240768-1c94-44c3-9772-d4671f7e35bf-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.495014 4756 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37240768-1c94-44c3-9772-d4671f7e35bf-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979256 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h5cnz_37240768-1c94-44c3-9772-d4671f7e35bf/console/0.log" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979339 4756 generic.go:334] "Generic (PLEG): container finished" podID="37240768-1c94-44c3-9772-d4671f7e35bf" containerID="6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad" exitCode=2 Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979414 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h5cnz" event={"ID":"37240768-1c94-44c3-9772-d4671f7e35bf","Type":"ContainerDied","Data":"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad"} Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979463 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h5cnz" event={"ID":"37240768-1c94-44c3-9772-d4671f7e35bf","Type":"ContainerDied","Data":"880e034848c0debb953f1f3067f01929986457e0c2fa0c3b8ac0602ba21e57dc"} Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979507 4756 scope.go:117] "RemoveContainer" containerID="6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad" Sep 30 19:44:33 crc kubenswrapper[4756]: I0930 19:44:33.979517 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-h5cnz" Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.021284 4756 scope.go:117] "RemoveContainer" containerID="6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad" Sep 30 19:44:34 crc kubenswrapper[4756]: E0930 19:44:34.021869 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad\": container with ID starting with 6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad not found: ID does not exist" containerID="6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad" Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.021923 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad"} err="failed to get container status \"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad\": rpc error: code = NotFound desc = could not find container \"6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad\": container with ID starting with 6c224e8d17bfbca4fb8e8a0784ceb0718b95b7d7538f037c9b7b5d11e79372ad not found: ID does not exist" Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.074997 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.082609 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-h5cnz"] Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.987475 4756 generic.go:334] "Generic (PLEG): container finished" podID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerID="ef0ad9e251b76fa351641ceb739bb26da3ff283b960e28d1ccde5a6b9618675c" exitCode=0 Sep 30 19:44:34 crc kubenswrapper[4756]: I0930 19:44:34.987608 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" event={"ID":"63c2f5ab-2343-4339-9fcf-f0fd80b703af","Type":"ContainerDied","Data":"ef0ad9e251b76fa351641ceb739bb26da3ff283b960e28d1ccde5a6b9618675c"} Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.124238 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" path="/var/lib/kubelet/pods/37240768-1c94-44c3-9772-d4671f7e35bf/volumes" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.590135 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:35 crc kubenswrapper[4756]: E0930 19:44:35.590342 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" containerName="console" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.590353 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" containerName="console" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.590471 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="37240768-1c94-44c3-9772-d4671f7e35bf" containerName="console" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.591137 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.613708 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.629868 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.629956 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5q2w\" (UniqueName: \"kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.630052 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.732127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.732634 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.732681 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.732707 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5q2w\" (UniqueName: \"kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.733198 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.754851 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r5q2w\" (UniqueName: \"kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w\") pod \"redhat-operators-c4pf9\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.909204 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.997813 4756 generic.go:334] "Generic (PLEG): container finished" podID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerID="4336ddcddee163b5662b1ae16f4427a2274005c51c6e2ce20c1378758b02d5ee" exitCode=0 Sep 30 19:44:35 crc kubenswrapper[4756]: I0930 19:44:35.997860 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" event={"ID":"63c2f5ab-2343-4339-9fcf-f0fd80b703af","Type":"ContainerDied","Data":"4336ddcddee163b5662b1ae16f4427a2274005c51c6e2ce20c1378758b02d5ee"} Sep 30 19:44:36 crc kubenswrapper[4756]: I0930 19:44:36.365973 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:36 crc kubenswrapper[4756]: W0930 19:44:36.373554 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2b43201_51ce_4e91_8dca_ca239156836d.slice/crio-6e660008efd9e8940724f76824df5cab45e4b263eb0acd9931e5bee5b530b9b7 WatchSource:0}: Error finding container 6e660008efd9e8940724f76824df5cab45e4b263eb0acd9931e5bee5b530b9b7: Status 404 returned error can't find the container with id 6e660008efd9e8940724f76824df5cab45e4b263eb0acd9931e5bee5b530b9b7 Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.005182 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b43201-51ce-4e91-8dca-ca239156836d" containerID="17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a" exitCode=0 Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.005265 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerDied","Data":"17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a"} Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.005753 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerStarted","Data":"6e660008efd9e8940724f76824df5cab45e4b263eb0acd9931e5bee5b530b9b7"} Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.324054 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.374421 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util\") pod \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.374525 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blxb7\" (UniqueName: \"kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7\") pod \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.374572 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle\") pod \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\" (UID: \"63c2f5ab-2343-4339-9fcf-f0fd80b703af\") " Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.375643 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle" (OuterVolumeSpecName: "bundle") pod "63c2f5ab-2343-4339-9fcf-f0fd80b703af" (UID: "63c2f5ab-2343-4339-9fcf-f0fd80b703af"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.381223 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7" (OuterVolumeSpecName: "kube-api-access-blxb7") pod "63c2f5ab-2343-4339-9fcf-f0fd80b703af" (UID: "63c2f5ab-2343-4339-9fcf-f0fd80b703af"). InnerVolumeSpecName "kube-api-access-blxb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.397860 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util" (OuterVolumeSpecName: "util") pod "63c2f5ab-2343-4339-9fcf-f0fd80b703af" (UID: "63c2f5ab-2343-4339-9fcf-f0fd80b703af"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.476535 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blxb7\" (UniqueName: \"kubernetes.io/projected/63c2f5ab-2343-4339-9fcf-f0fd80b703af-kube-api-access-blxb7\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.476579 4756 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:37 crc kubenswrapper[4756]: I0930 19:44:37.476593 4756 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63c2f5ab-2343-4339-9fcf-f0fd80b703af-util\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:38 crc kubenswrapper[4756]: I0930 19:44:38.015747 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" event={"ID":"63c2f5ab-2343-4339-9fcf-f0fd80b703af","Type":"ContainerDied","Data":"c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2"} Sep 30 19:44:38 crc kubenswrapper[4756]: I0930 19:44:38.015800 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c09e76b333176e288db27009c2a5b2a320f2d4e8659b12430bedf049b13ca2e2" Sep 30 19:44:38 crc kubenswrapper[4756]: I0930 19:44:38.015880 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm" Sep 30 19:44:39 crc kubenswrapper[4756]: I0930 19:44:39.023460 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b43201-51ce-4e91-8dca-ca239156836d" containerID="caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a" exitCode=0 Sep 30 19:44:39 crc kubenswrapper[4756]: I0930 19:44:39.023568 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerDied","Data":"caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a"} Sep 30 19:44:40 crc kubenswrapper[4756]: I0930 19:44:40.033620 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerStarted","Data":"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0"} Sep 30 19:44:40 crc kubenswrapper[4756]: I0930 19:44:40.064938 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c4pf9" podStartSLOduration=2.614870262 podStartE2EDuration="5.064910812s" podCreationTimestamp="2025-09-30 19:44:35 +0000 UTC" firstStartedPulling="2025-09-30 19:44:37.006275799 +0000 UTC m=+806.627209296" lastFinishedPulling="2025-09-30 19:44:39.456316379 +0000 UTC m=+809.077249846" observedRunningTime="2025-09-30 19:44:40.058370091 +0000 UTC m=+809.679303608" watchObservedRunningTime="2025-09-30 19:44:40.064910812 +0000 UTC m=+809.685844329" Sep 30 19:44:45 crc kubenswrapper[4756]: I0930 19:44:45.909898 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:45 crc kubenswrapper[4756]: I0930 19:44:45.910467 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 
19:44:45 crc kubenswrapper[4756]: I0930 19:44:45.947931 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:46 crc kubenswrapper[4756]: I0930 19:44:46.109917 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:46 crc kubenswrapper[4756]: I0930 19:44:46.984590 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.078486 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c4pf9" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="registry-server" containerID="cri-o://8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0" gracePeriod=2 Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.428629 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.562191 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content\") pod \"b2b43201-51ce-4e91-8dca-ca239156836d\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.579898 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5q2w\" (UniqueName: \"kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w\") pod \"b2b43201-51ce-4e91-8dca-ca239156836d\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.579961 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities\") pod \"b2b43201-51ce-4e91-8dca-ca239156836d\" (UID: \"b2b43201-51ce-4e91-8dca-ca239156836d\") " Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.580747 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities" (OuterVolumeSpecName: "utilities") pod "b2b43201-51ce-4e91-8dca-ca239156836d" (UID: "b2b43201-51ce-4e91-8dca-ca239156836d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.607674 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w" (OuterVolumeSpecName: "kube-api-access-r5q2w") pod "b2b43201-51ce-4e91-8dca-ca239156836d" (UID: "b2b43201-51ce-4e91-8dca-ca239156836d"). InnerVolumeSpecName "kube-api-access-r5q2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.681131 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5q2w\" (UniqueName: \"kubernetes.io/projected/b2b43201-51ce-4e91-8dca-ca239156836d-kube-api-access-r5q2w\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.681166 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775609 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt"] Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775836 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="pull" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775847 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="pull" Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775859 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="registry-server" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775867 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="registry-server" Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775874 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="util" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775880 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="util" Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775890 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="extract-content" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775895 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="extract-content" Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775904 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="extract" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775910 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="extract" Sep 30 19:44:48 crc kubenswrapper[4756]: E0930 19:44:48.775919 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="extract-utilities" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.775925 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="extract-utilities" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.776013 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="63c2f5ab-2343-4339-9fcf-f0fd80b703af" containerName="extract" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.776023 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" containerName="registry-server" Sep 30 19:44:48 crc kubenswrapper[4756]: 
I0930 19:44:48.776557 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.778332 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.778780 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.778446 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-v8fns" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.778528 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.778573 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.804943 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt"] Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.882817 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-apiservice-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.882909 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxfdn\" (UniqueName: \"kubernetes.io/projected/7a236abf-4700-48d8-a59e-2ad5ff34d034-kube-api-access-xxfdn\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.882937 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-webhook-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.984311 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-apiservice-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.984413 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxfdn\" (UniqueName: \"kubernetes.io/projected/7a236abf-4700-48d8-a59e-2ad5ff34d034-kube-api-access-xxfdn\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " 
pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.984440 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-webhook-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.990605 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-webhook-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:48 crc kubenswrapper[4756]: I0930 19:44:48.990786 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a236abf-4700-48d8-a59e-2ad5ff34d034-apiservice-cert\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.007223 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxfdn\" (UniqueName: \"kubernetes.io/projected/7a236abf-4700-48d8-a59e-2ad5ff34d034-kube-api-access-xxfdn\") pod \"metallb-operator-controller-manager-6787fd9965-mp4jt\" (UID: \"7a236abf-4700-48d8-a59e-2ad5ff34d034\") " pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.028142 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6"] Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.028865 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.032287 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.032465 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.033034 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-jbhrs" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.038891 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6"] Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.085526 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8q6b\" (UniqueName: \"kubernetes.io/projected/87993ac9-83b4-4481-8611-f3f458f4b424-kube-api-access-c8q6b\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.085564 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-webhook-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.085593 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-apiservice-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.089301 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b43201-51ce-4e91-8dca-ca239156836d" containerID="8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0" exitCode=0 Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.089338 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerDied","Data":"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0"} Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.089360 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4pf9" event={"ID":"b2b43201-51ce-4e91-8dca-ca239156836d","Type":"ContainerDied","Data":"6e660008efd9e8940724f76824df5cab45e4b263eb0acd9931e5bee5b530b9b7"} Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.089375 4756 scope.go:117] "RemoveContainer" containerID="8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.089504 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c4pf9" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.090887 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.107852 4756 scope.go:117] "RemoveContainer" containerID="caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.125598 4756 scope.go:117] "RemoveContainer" containerID="17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.140664 4756 scope.go:117] "RemoveContainer" containerID="8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0" Sep 30 19:44:49 crc kubenswrapper[4756]: E0930 19:44:49.140992 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0\": container with ID starting with 8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0 not found: ID does not exist" containerID="8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.141033 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0"} err="failed to get container status \"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0\": rpc error: code = NotFound desc = could not find container \"8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0\": container with ID starting with 8e7dce1d1f56d2639753dfbb824f3adec76a0a9d0818cad0b9b74fe0b59983b0 not found: ID does not exist" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.141061 4756 scope.go:117] "RemoveContainer" containerID="caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a" Sep 30 19:44:49 crc kubenswrapper[4756]: E0930 19:44:49.141408 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a\": container with ID starting with caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a not found: ID does not exist" containerID="caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.141437 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a"} err="failed to get container status \"caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a\": rpc error: code = NotFound desc = could not find container \"caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a\": container with ID starting with caeb9189323a425311a7e62bc0436ab2814e388ea59366cfb885aa2f61c1037a not found: ID does not exist" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.141457 4756 scope.go:117] "RemoveContainer" containerID="17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a" Sep 30 19:44:49 crc kubenswrapper[4756]: E0930 19:44:49.142035 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a\": container with ID starting with 17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a not found: ID does not exist" containerID="17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.142059 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a"} err="failed to get container status \"17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a\": rpc error: code = NotFound desc = could not find container \"17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a\": container with ID starting with 17fe6b96145a4ce7af81a065c1e467ab8c6df9bf85846307846b4ca57e1b8c4a not found: ID does not exist" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.186767 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8q6b\" (UniqueName: \"kubernetes.io/projected/87993ac9-83b4-4481-8611-f3f458f4b424-kube-api-access-c8q6b\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.186970 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-webhook-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.187001 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-apiservice-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.193080 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-apiservice-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.198082 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/87993ac9-83b4-4481-8611-f3f458f4b424-webhook-cert\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.208978 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8q6b\" (UniqueName: \"kubernetes.io/projected/87993ac9-83b4-4481-8611-f3f458f4b424-kube-api-access-c8q6b\") pod \"metallb-operator-webhook-server-6b8b4f7c6f-s2bh6\" (UID: \"87993ac9-83b4-4481-8611-f3f458f4b424\") " pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.396261 4756 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.565105 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt"] Sep 30 19:44:49 crc kubenswrapper[4756]: W0930 19:44:49.612133 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a236abf_4700_48d8_a59e_2ad5ff34d034.slice/crio-0be1b0f17972b85ce70ef9a1fa81ddde1e1aff701f5d3911022f1d608b357f83 WatchSource:0}: Error finding container 0be1b0f17972b85ce70ef9a1fa81ddde1e1aff701f5d3911022f1d608b357f83: Status 404 returned error can't find the container with id 0be1b0f17972b85ce70ef9a1fa81ddde1e1aff701f5d3911022f1d608b357f83 Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.689209 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6"] Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.862510 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2b43201-51ce-4e91-8dca-ca239156836d" (UID: "b2b43201-51ce-4e91-8dca-ca239156836d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:44:49 crc kubenswrapper[4756]: I0930 19:44:49.894482 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b43201-51ce-4e91-8dca-ca239156836d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:44:50 crc kubenswrapper[4756]: I0930 19:44:50.035518 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:50 crc kubenswrapper[4756]: I0930 19:44:50.040534 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c4pf9"] Sep 30 19:44:50 crc kubenswrapper[4756]: I0930 19:44:50.097852 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" event={"ID":"7a236abf-4700-48d8-a59e-2ad5ff34d034","Type":"ContainerStarted","Data":"0be1b0f17972b85ce70ef9a1fa81ddde1e1aff701f5d3911022f1d608b357f83"} Sep 30 19:44:50 crc kubenswrapper[4756]: I0930 19:44:50.099449 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" event={"ID":"87993ac9-83b4-4481-8611-f3f458f4b424","Type":"ContainerStarted","Data":"e8e717111273dc6f0955c13753a2cb9bfbb9e39f9d64cbcf881981ecf9784e0e"} Sep 30 19:44:51 crc kubenswrapper[4756]: I0930 19:44:51.164849 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2b43201-51ce-4e91-8dca-ca239156836d" path="/var/lib/kubelet/pods/b2b43201-51ce-4e91-8dca-ca239156836d/volumes" Sep 30 19:44:53 crc kubenswrapper[4756]: I0930 19:44:53.121734 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" event={"ID":"7a236abf-4700-48d8-a59e-2ad5ff34d034","Type":"ContainerStarted","Data":"5c15f2ca6884a174e996108a99b73510fde8b3c6cb1d329928b7c44ad6558303"} Sep 30 19:44:53 crc kubenswrapper[4756]: I0930 19:44:53.122314 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:44:53 crc kubenswrapper[4756]: I0930 19:44:53.162813 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" podStartSLOduration=2.140965487 podStartE2EDuration="5.162791831s" podCreationTimestamp="2025-09-30 19:44:48 +0000 UTC" firstStartedPulling="2025-09-30 19:44:49.623549989 +0000 UTC m=+819.244483466" lastFinishedPulling="2025-09-30 19:44:52.645376333 +0000 UTC m=+822.266309810" observedRunningTime="2025-09-30 19:44:53.156861277 +0000 UTC m=+822.777794764" watchObservedRunningTime="2025-09-30 19:44:53.162791831 +0000 UTC m=+822.783725328" Sep 30 19:44:55 crc kubenswrapper[4756]: I0930 19:44:55.133553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" event={"ID":"87993ac9-83b4-4481-8611-f3f458f4b424","Type":"ContainerStarted","Data":"acbb46d8056b5835e29f8b41baadf0650f9ac71272bb9d0e0796c4d96ad83ab5"} Sep 30 19:44:55 crc kubenswrapper[4756]: I0930 19:44:55.151361 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" podStartSLOduration=0.871394935 podStartE2EDuration="6.151346641s" podCreationTimestamp="2025-09-30 19:44:49 +0000 UTC" firstStartedPulling="2025-09-30 19:44:49.701244023 +0000 UTC m=+819.322177500" lastFinishedPulling="2025-09-30 19:44:54.981195729 +0000 UTC m=+824.602129206" observedRunningTime="2025-09-30 19:44:55.149932814 +0000 UTC m=+824.770866291" watchObservedRunningTime="2025-09-30 19:44:55.151346641 +0000 UTC m=+824.772280118" Sep 30 19:44:56 crc kubenswrapper[4756]: I0930 19:44:56.139755 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.146280 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh"] Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.147519 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.149968 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.152319 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.153383 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh"] Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.252025 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm2q7\" (UniqueName: \"kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.252136 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.252200 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.353127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.353197 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.353250 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm2q7\" (UniqueName: \"kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.354118 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume\") pod 
\"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.360181 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.367973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm2q7\" (UniqueName: \"kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7\") pod \"collect-profiles-29321025-twlmh\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.462540 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:00 crc kubenswrapper[4756]: I0930 19:45:00.850316 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh"] Sep 30 19:45:01 crc kubenswrapper[4756]: I0930 19:45:01.179410 4756 generic.go:334] "Generic (PLEG): container finished" podID="9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" containerID="b6b7bf2fe7d30cfb6e4ee8e95898b7b4ea8a2edeeed7df5ed94bc3a661470790" exitCode=0 Sep 30 19:45:01 crc kubenswrapper[4756]: I0930 19:45:01.179714 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" event={"ID":"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1","Type":"ContainerDied","Data":"b6b7bf2fe7d30cfb6e4ee8e95898b7b4ea8a2edeeed7df5ed94bc3a661470790"} Sep 30 19:45:01 crc kubenswrapper[4756]: I0930 19:45:01.179740 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" event={"ID":"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1","Type":"ContainerStarted","Data":"816c6507d11e40f479a08f424e2132ce3ecdef00736a75e46d930c01992eba9d"} Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.509882 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.583320 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume\") pod \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.583384 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm2q7\" (UniqueName: \"kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7\") pod \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.583474 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume\") pod \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\" (UID: \"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1\") " Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.584155 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume" (OuterVolumeSpecName: "config-volume") pod "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" (UID: "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.591997 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7" (OuterVolumeSpecName: "kube-api-access-xm2q7") pod "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" (UID: "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1"). InnerVolumeSpecName "kube-api-access-xm2q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.592009 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" (UID: "9ad9e216-4aec-4e6c-84f9-8b714c6e57f1"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.684750 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.684793 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm2q7\" (UniqueName: \"kubernetes.io/projected/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-kube-api-access-xm2q7\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:02 crc kubenswrapper[4756]: I0930 19:45:02.684811 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.193604 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" event={"ID":"9ad9e216-4aec-4e6c-84f9-8b714c6e57f1","Type":"ContainerDied","Data":"816c6507d11e40f479a08f424e2132ce3ecdef00736a75e46d930c01992eba9d"} Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.193872 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="816c6507d11e40f479a08f424e2132ce3ecdef00736a75e46d930c01992eba9d" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.193693 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.813135 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:03 crc kubenswrapper[4756]: E0930 19:45:03.813400 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" containerName="collect-profiles" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.813460 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" containerName="collect-profiles" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.813584 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" containerName="collect-profiles" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.814432 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.825492 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.900219 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.900260 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8f5f\" (UniqueName: \"kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:03 crc kubenswrapper[4756]: I0930 19:45:03.900284 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.001611 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8f5f\" (UniqueName: \"kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.001674 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.001751 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.002134 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.002155 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.020186 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t8f5f\" (UniqueName: \"kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f\") pod \"certified-operators-rwqq6\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.172056 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:04 crc kubenswrapper[4756]: I0930 19:45:04.443648 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:04 crc kubenswrapper[4756]: W0930 19:45:04.461137 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7cb24df_7c55_4c5c_be69_96c9d6542d44.slice/crio-4e6121542729f0559da278a0c40de4a178f5ad63a6a16d4a0ab4feb4e8e2c38b WatchSource:0}: Error finding container 4e6121542729f0559da278a0c40de4a178f5ad63a6a16d4a0ab4feb4e8e2c38b: Status 404 returned error can't find the container with id 4e6121542729f0559da278a0c40de4a178f5ad63a6a16d4a0ab4feb4e8e2c38b Sep 30 19:45:05 crc kubenswrapper[4756]: I0930 19:45:05.202907 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerID="136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465" exitCode=0 Sep 30 19:45:05 crc kubenswrapper[4756]: I0930 19:45:05.203014 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerDied","Data":"136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465"} Sep 30 19:45:05 crc kubenswrapper[4756]: I0930 19:45:05.203225 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerStarted","Data":"4e6121542729f0559da278a0c40de4a178f5ad63a6a16d4a0ab4feb4e8e2c38b"} Sep 30 19:45:06 crc kubenswrapper[4756]: I0930 19:45:06.211417 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerID="648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3" exitCode=0 Sep 30 19:45:06 crc kubenswrapper[4756]: I0930 19:45:06.211467 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerDied","Data":"648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3"} Sep 30 19:45:07 crc kubenswrapper[4756]: I0930 19:45:07.217584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerStarted","Data":"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd"} Sep 30 19:45:07 crc kubenswrapper[4756]: I0930 19:45:07.233587 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rwqq6" podStartSLOduration=2.601605324 podStartE2EDuration="4.233563654s" podCreationTimestamp="2025-09-30 19:45:03 +0000 UTC" firstStartedPulling="2025-09-30 19:45:05.20412465 +0000 UTC m=+834.825058127" lastFinishedPulling="2025-09-30 19:45:06.83608298 +0000 UTC m=+836.457016457" observedRunningTime="2025-09-30 19:45:07.230142625 +0000 UTC 
m=+836.851076112" watchObservedRunningTime="2025-09-30 19:45:07.233563654 +0000 UTC m=+836.854497141" Sep 30 19:45:09 crc kubenswrapper[4756]: I0930 19:45:09.403894 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6b8b4f7c6f-s2bh6" Sep 30 19:45:14 crc kubenswrapper[4756]: I0930 19:45:14.173258 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:14 crc kubenswrapper[4756]: I0930 19:45:14.173595 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:14 crc kubenswrapper[4756]: I0930 19:45:14.243661 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:14 crc kubenswrapper[4756]: I0930 19:45:14.319817 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:15 crc kubenswrapper[4756]: I0930 19:45:15.007335 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:16 crc kubenswrapper[4756]: I0930 19:45:16.276838 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rwqq6" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="registry-server" containerID="cri-o://ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd" gracePeriod=2 Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.282057 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.283204 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerID="ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd" exitCode=0 Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.283240 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerDied","Data":"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd"} Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.283288 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwqq6" event={"ID":"a7cb24df-7c55-4c5c-be69-96c9d6542d44","Type":"ContainerDied","Data":"4e6121542729f0559da278a0c40de4a178f5ad63a6a16d4a0ab4feb4e8e2c38b"} Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.283307 4756 scope.go:117] "RemoveContainer" containerID="ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.301998 4756 scope.go:117] "RemoveContainer" containerID="648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.323361 4756 scope.go:117] "RemoveContainer" containerID="136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.340053 4756 scope.go:117] "RemoveContainer" containerID="ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd" Sep 30 19:45:17 crc kubenswrapper[4756]: E0930 19:45:17.340540 4756 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd\": container with ID starting with ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd not found: ID does not exist" containerID="ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.340584 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd"} err="failed to get container status \"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd\": rpc error: code = NotFound desc = could not find container \"ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd\": container with ID starting with ef6411c2f256dea7987ae596e1e7906530570539424be89aa90000965a2266fd not found: ID does not exist" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.340610 4756 scope.go:117] "RemoveContainer" containerID="648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3" Sep 30 19:45:17 crc kubenswrapper[4756]: E0930 19:45:17.341020 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3\": container with ID starting with 648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3 not found: ID does not exist" containerID="648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.341041 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3"} err="failed to get container status \"648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3\": rpc error: code = NotFound desc = could not find container \"648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3\": container with ID starting with 648e6cdc6190ddc18ba7ce6fd7f8632009f61a3836b2e5970c23abd404664aa3 not found: ID does not exist" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.341054 4756 scope.go:117] "RemoveContainer" containerID="136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465" Sep 30 19:45:17 crc kubenswrapper[4756]: E0930 19:45:17.341374 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465\": container with ID starting with 136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465 not found: ID does not exist" containerID="136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.341427 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465"} err="failed to get container status \"136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465\": rpc error: code = NotFound desc = could not find container \"136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465\": container with ID starting with 136eb021ee4ca3ef036f7b4e274d40124342f2202ccde400c3d156e68cd88465 not found: ID does not exist" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.385325 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities\") pod \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.385471 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8f5f\" (UniqueName: \"kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f\") pod \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.385567 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content\") pod \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\" (UID: \"a7cb24df-7c55-4c5c-be69-96c9d6542d44\") " Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.386942 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities" (OuterVolumeSpecName: "utilities") pod "a7cb24df-7c55-4c5c-be69-96c9d6542d44" (UID: "a7cb24df-7c55-4c5c-be69-96c9d6542d44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.391369 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f" (OuterVolumeSpecName: "kube-api-access-t8f5f") pod "a7cb24df-7c55-4c5c-be69-96c9d6542d44" (UID: "a7cb24df-7c55-4c5c-be69-96c9d6542d44"). InnerVolumeSpecName "kube-api-access-t8f5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.432318 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7cb24df-7c55-4c5c-be69-96c9d6542d44" (UID: "a7cb24df-7c55-4c5c-be69-96c9d6542d44"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.487906 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.487941 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8f5f\" (UniqueName: \"kubernetes.io/projected/a7cb24df-7c55-4c5c-be69-96c9d6542d44-kube-api-access-t8f5f\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:17 crc kubenswrapper[4756]: I0930 19:45:17.487957 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb24df-7c55-4c5c-be69-96c9d6542d44-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:45:18 crc kubenswrapper[4756]: I0930 19:45:18.289772 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rwqq6" Sep 30 19:45:18 crc kubenswrapper[4756]: I0930 19:45:18.339172 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:18 crc kubenswrapper[4756]: I0930 19:45:18.345118 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rwqq6"] Sep 30 19:45:19 crc kubenswrapper[4756]: I0930 19:45:19.121133 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" path="/var/lib/kubelet/pods/a7cb24df-7c55-4c5c-be69-96c9d6542d44/volumes" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.094253 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6787fd9965-mp4jt" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.869215 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"] Sep 30 19:45:29 crc kubenswrapper[4756]: E0930 19:45:29.869866 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="extract-content" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.869888 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="extract-content" Sep 30 19:45:29 crc kubenswrapper[4756]: E0930 19:45:29.869951 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="extract-utilities" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.869960 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="extract-utilities" Sep 30 19:45:29 crc kubenswrapper[4756]: E0930 19:45:29.869969 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="registry-server" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.869978 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="registry-server" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.870095 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cb24df-7c55-4c5c-be69-96c9d6542d44" containerName="registry-server" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.870608 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.872378 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-kk5px" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.872840 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-prvjv"] Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.873835 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.875041 4756 util.go:30] "No sandbox for pod can be found. 
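
The deleted catalog pod above is torn down in a strict order: each volume's TearDown runs first, only then is it reported "Volume detached", and only once every volume is gone does the kubelet prune the per-pod directory ("Cleaned up orphaned pod volumes dir"). A minimal Go sketch of that ordering, assuming illustrative paths rather than the kubelet's actual code:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupPodVolumes mirrors the ordering visible in the log: tear down each
// volume, report it detached, and only then remove the pod's volumes dir.
func cleanupPodVolumes(podUID string, volumes []string) error {
	base := filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
	for _, v := range volumes {
		dir := filepath.Join(base, v)
		if err := os.RemoveAll(dir); err != nil {
			// Leave the dir in place so a later pass can retry the teardown.
			return fmt.Errorf("tear down %s: %w", v, err)
		}
		fmt.Printf("volume %q detached\n", v)
	}
	// Only an empty volumes dir may be pruned (the "orphaned" cleanup step).
	return os.Remove(base)
}

func main() {
	_ = cleanupPodVolumes("a7cb24df-7c55-4c5c-be69-96c9d6542d44",
		[]string{"utilities", "catalog-content"})
}
```
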
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.877709 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.877732 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.877744 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"]
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946332 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/beeaebc2-50a1-44eb-b336-d731d9658490-frr-startup\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946383 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-686l5\" (UniqueName: \"kubernetes.io/projected/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-kube-api-access-686l5\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946424 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-reloader\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946456 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946471 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-conf\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946487 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-metrics\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946514 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946531 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rblt\" (UniqueName: \"kubernetes.io/projected/beeaebc2-50a1-44eb-b336-d731d9658490-kube-api-access-8rblt\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.946563 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-sockets\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.947671 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-kqcpz"]
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.948739 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.953117 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.953142 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.953235 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-thdjc"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.954579 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.961588 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-c9zc5"]
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.962646 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.964257 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Sep 30 19:45:29 crc kubenswrapper[4756]: I0930 19:45:29.982193 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-c9zc5"]
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047146 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rblt\" (UniqueName: \"kubernetes.io/projected/beeaebc2-50a1-44eb-b336-d731d9658490-kube-api-access-8rblt\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047429 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-sockets\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047513 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047595 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/beeaebc2-50a1-44eb-b336-d731d9658490-frr-startup\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047676 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-686l5\" (UniqueName: \"kubernetes.io/projected/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-kube-api-access-686l5\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047750 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47drd\" (UniqueName: \"kubernetes.io/projected/44d5093f-1239-42d2-9ff5-393109958a08-kube-api-access-47drd\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047825 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-reloader\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047868 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-sockets\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.047903 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-metrics-certs\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048010 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-cert\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048049 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gj78\" (UniqueName: \"kubernetes.io/projected/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-kube-api-access-8gj78\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048138 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048143 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-reloader\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048177 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048202 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-conf\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048280 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-metrics\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048313 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metallb-excludel2\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048467 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048519 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-frr-conf\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.048314 4756 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.048605 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs podName:beeaebc2-50a1-44eb-b336-d731d9658490 nodeName:}" failed. No retries permitted until 2025-09-30 19:45:30.548584953 +0000 UTC m=+860.169518440 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs") pod "frr-k8s-prvjv" (UID: "beeaebc2-50a1-44eb-b336-d731d9658490") : secret "frr-k8s-certs-secret" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048674 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/beeaebc2-50a1-44eb-b336-d731d9658490-frr-startup\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.048972 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/beeaebc2-50a1-44eb-b336-d731d9658490-metrics\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.055055 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.064738 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rblt\" (UniqueName: \"kubernetes.io/projected/beeaebc2-50a1-44eb-b336-d731d9658490-kube-api-access-8rblt\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.071080 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-686l5\" (UniqueName: \"kubernetes.io/projected/6c83c6d0-f45f-4c8d-89d6-4434da2f24c9-kube-api-access-686l5\") pod \"frr-k8s-webhook-server-5478bdb765-f2dkx\" (UID: \"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149656 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149729 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47drd\" (UniqueName: \"kubernetes.io/projected/44d5093f-1239-42d2-9ff5-393109958a08-kube-api-access-47drd\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149751 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-metrics-certs\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149770 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-cert\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149787 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gj78\" (UniqueName: \"kubernetes.io/projected/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-kube-api-access-8gj78\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149824 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.149857 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metallb-excludel2\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.150527 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metallb-excludel2\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.150572 4756 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.150626 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs podName:0a9cec75-23b0-43d9-8914-4eb7fa93c7fa nodeName:}" failed. No retries permitted until 2025-09-30 19:45:30.65060883 +0000 UTC m=+860.271542307 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs") pod "speaker-kqcpz" (UID: "0a9cec75-23b0-43d9-8914-4eb7fa93c7fa") : secret "speaker-certs-secret" not found
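
The "No retries permitted until ... (durationBeforeRetry 500ms)" lines show per-operation exponential backoff: the first failed SetUp waits 500ms, and the next failure for the same volume waits 1s (visible for the memberlist volume just below). A minimal Go sketch of that doubling schedule; the cap value is an assumption for illustration:

```go
package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the wait on each consecutive failure, starting at the
// 500ms seen in the log; the cap here is illustrative, not the kubelet's.
func nextBackoff(d time.Duration) time.Duration {
	const (
		initial = 500 * time.Millisecond
		max     = 2 * time.Minute
	)
	if d == 0 {
		return initial
	}
	if d *= 2; d > max {
		return max
	}
	return d
}

func main() {
	var d time.Duration
	for i := 0; i < 4; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 500ms, 1s, 2s, 4s
	}
}
```
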
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.151106 4756 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.151276 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist podName:0a9cec75-23b0-43d9-8914-4eb7fa93c7fa nodeName:}" failed. No retries permitted until 2025-09-30 19:45:30.651253977 +0000 UTC m=+860.272187534 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist") pod "speaker-kqcpz" (UID: "0a9cec75-23b0-43d9-8914-4eb7fa93c7fa") : secret "metallb-memberlist" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.152079 4756 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.155048 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-metrics-certs\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.165765 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44d5093f-1239-42d2-9ff5-393109958a08-cert\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.165866 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gj78\" (UniqueName: \"kubernetes.io/projected/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-kube-api-access-8gj78\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.173352 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47drd\" (UniqueName: \"kubernetes.io/projected/44d5093f-1239-42d2-9ff5-393109958a08-kube-api-access-47drd\") pod \"controller-5d688f5ffc-c9zc5\" (UID: \"44d5093f-1239-42d2-9ff5-393109958a08\") " pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.187770 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.278806 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.381020 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx"]
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.556840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.560739 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/beeaebc2-50a1-44eb-b336-d731d9658490-metrics-certs\") pod \"frr-k8s-prvjv\" (UID: \"beeaebc2-50a1-44eb-b336-d731d9658490\") " pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.658720 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.658799 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.658903 4756 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: E0930 19:45:30.658948 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist podName:0a9cec75-23b0-43d9-8914-4eb7fa93c7fa nodeName:}" failed. No retries permitted until 2025-09-30 19:45:31.658932728 +0000 UTC m=+861.279866205 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist") pod "speaker-kqcpz" (UID: "0a9cec75-23b0-43d9-8914-4eb7fa93c7fa") : secret "metallb-memberlist" not found
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.662063 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-metrics-certs\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz"
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.728090 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-c9zc5"]
Sep 30 19:45:30 crc kubenswrapper[4756]: W0930 19:45:30.733459 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d5093f_1239_42d2_9ff5_393109958a08.slice/crio-88d2601d3fc82f330f3d2817bb7368b91ea1715d57854b71e925455775ecbb8a WatchSource:0}: Error finding container 88d2601d3fc82f330f3d2817bb7368b91ea1715d57854b71e925455775ecbb8a: Status 404 returned error can't find the container with id 88d2601d3fc82f330f3d2817bb7368b91ea1715d57854b71e925455775ecbb8a
Sep 30 19:45:30 crc kubenswrapper[4756]: I0930 19:45:30.794495 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-prvjv"
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.378494 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-c9zc5" event={"ID":"44d5093f-1239-42d2-9ff5-393109958a08","Type":"ContainerStarted","Data":"e28640d995e35805b7eaa18ce5d1fd0663874ff5393d9978897c0974f391b8b5"}
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.378887 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-c9zc5" event={"ID":"44d5093f-1239-42d2-9ff5-393109958a08","Type":"ContainerStarted","Data":"20d0a8337598c008087c417a20efd3085647745f64cc9dfdc9b5eb6fe4ae0846"}
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.378922 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-c9zc5"
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.378944 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-c9zc5" event={"ID":"44d5093f-1239-42d2-9ff5-393109958a08","Type":"ContainerStarted","Data":"88d2601d3fc82f330f3d2817bb7368b91ea1715d57854b71e925455775ecbb8a"}
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.382029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" event={"ID":"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9","Type":"ContainerStarted","Data":"5322792ab336e5db908fbe8675955950f538f96a5dde1e1e6afabfe9d95402c7"}
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.383763 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"b72acd60f9621d0e7fbedd921db72ba5e90cbf4fa38b32ffa41166bb0cc7db71"}
Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.404344 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-c9zc5" podStartSLOduration=2.40431844 podStartE2EDuration="2.40431844s" podCreationTimestamp="2025-09-30 19:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:45:31.39970612 +0000 UTC m=+861.020639607" watchObservedRunningTime="2025-09-30 19:45:31.40431844 +0000 UTC m=+861.025251917"
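
The startup-latency tracker entries decompose as: E2E duration = observed running time minus pod creation time, and the SLO duration additionally subtracts the image-pull window, so pods that needed no pull (zero-valued pull timestamps, like the controller entry above) report SLO == E2E. A small Go check using the values from the frr-k8s-webhook-server entry a little further down:

```go
package main

import (
	"fmt"
	"time"
)

// Values copied from the webhook-server "Observed pod startup duration" line:
// created 19:45:29, watch-observed running 19:45:38.505721575, image pull
// from 19:45:30.392866059 to 19:45:37.568377773.
func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30T19:45:29Z")
	running := parse("2025-09-30T19:45:38.505721575Z")
	pullStart := parse("2025-09-30T19:45:30.392866059Z")
	pullEnd := parse("2025-09-30T19:45:37.568377773Z")

	e2e := running.Sub(created)
	slo := e2e - pullEnd.Sub(pullStart) // SLO excludes the pull window
	fmt.Println(e2e, slo)               // 9.505721575s 2.330209861s
}
```

Both printed values match the logged podStartE2EDuration and podStartSLOduration exactly.
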
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:45:31.39970612 +0000 UTC m=+861.020639607" watchObservedRunningTime="2025-09-30 19:45:31.40431844 +0000 UTC m=+861.025251917" Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.672108 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz" Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.681280 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0a9cec75-23b0-43d9-8914-4eb7fa93c7fa-memberlist\") pod \"speaker-kqcpz\" (UID: \"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa\") " pod="metallb-system/speaker-kqcpz" Sep 30 19:45:31 crc kubenswrapper[4756]: I0930 19:45:31.762169 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kqcpz" Sep 30 19:45:31 crc kubenswrapper[4756]: W0930 19:45:31.835401 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a9cec75_23b0_43d9_8914_4eb7fa93c7fa.slice/crio-720d2e0c76fd939ed8ee82f4769aba0abcc4e3487528bcbcc610be9f0945ba28 WatchSource:0}: Error finding container 720d2e0c76fd939ed8ee82f4769aba0abcc4e3487528bcbcc610be9f0945ba28: Status 404 returned error can't find the container with id 720d2e0c76fd939ed8ee82f4769aba0abcc4e3487528bcbcc610be9f0945ba28 Sep 30 19:45:32 crc kubenswrapper[4756]: I0930 19:45:32.390684 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kqcpz" event={"ID":"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa","Type":"ContainerStarted","Data":"5ee083162e7f0d16572ae3a2b43dbf9887667ee145674dff710bba862420a24b"} Sep 30 19:45:32 crc kubenswrapper[4756]: I0930 19:45:32.391055 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kqcpz" event={"ID":"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa","Type":"ContainerStarted","Data":"720d2e0c76fd939ed8ee82f4769aba0abcc4e3487528bcbcc610be9f0945ba28"} Sep 30 19:45:33 crc kubenswrapper[4756]: I0930 19:45:33.408958 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kqcpz" event={"ID":"0a9cec75-23b0-43d9-8914-4eb7fa93c7fa","Type":"ContainerStarted","Data":"0606994fb10918ee7e5a656bd9f8e7e4f74a251402b605a951650090b66f7760"} Sep 30 19:45:33 crc kubenswrapper[4756]: I0930 19:45:33.427387 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-kqcpz" podStartSLOduration=4.427368398 podStartE2EDuration="4.427368398s" podCreationTimestamp="2025-09-30 19:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:45:33.427217584 +0000 UTC m=+863.048151071" watchObservedRunningTime="2025-09-30 19:45:33.427368398 +0000 UTC m=+863.048301875" Sep 30 19:45:34 crc kubenswrapper[4756]: I0930 19:45:34.413921 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-kqcpz" Sep 30 19:45:38 crc kubenswrapper[4756]: I0930 19:45:38.437464 4756 generic.go:334] "Generic (PLEG): container finished" podID="beeaebc2-50a1-44eb-b336-d731d9658490" 
containerID="9bc47e6857e921f5d8eadf168b137d161adfc20664d3e3e788d776b86951c89c" exitCode=0 Sep 30 19:45:38 crc kubenswrapper[4756]: I0930 19:45:38.437597 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerDied","Data":"9bc47e6857e921f5d8eadf168b137d161adfc20664d3e3e788d776b86951c89c"} Sep 30 19:45:38 crc kubenswrapper[4756]: I0930 19:45:38.440664 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" event={"ID":"6c83c6d0-f45f-4c8d-89d6-4434da2f24c9","Type":"ContainerStarted","Data":"cdf24b762d29dbcf04b6afb2ad241e57d71f13c4f8d65b842a98f15577a93f06"} Sep 30 19:45:38 crc kubenswrapper[4756]: I0930 19:45:38.441688 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" Sep 30 19:45:38 crc kubenswrapper[4756]: I0930 19:45:38.505742 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" podStartSLOduration=2.330209861 podStartE2EDuration="9.505721575s" podCreationTimestamp="2025-09-30 19:45:29 +0000 UTC" firstStartedPulling="2025-09-30 19:45:30.392866059 +0000 UTC m=+860.013799526" lastFinishedPulling="2025-09-30 19:45:37.568377773 +0000 UTC m=+867.189311240" observedRunningTime="2025-09-30 19:45:38.503743103 +0000 UTC m=+868.124676620" watchObservedRunningTime="2025-09-30 19:45:38.505721575 +0000 UTC m=+868.126655062" Sep 30 19:45:39 crc kubenswrapper[4756]: I0930 19:45:39.450710 4756 generic.go:334] "Generic (PLEG): container finished" podID="beeaebc2-50a1-44eb-b336-d731d9658490" containerID="adfa721975ae27449fdae9ef5618921a3867d2a1f7b7952bf7ad903c16cbda93" exitCode=0 Sep 30 19:45:39 crc kubenswrapper[4756]: I0930 19:45:39.450840 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerDied","Data":"adfa721975ae27449fdae9ef5618921a3867d2a1f7b7952bf7ad903c16cbda93"} Sep 30 19:45:40 crc kubenswrapper[4756]: I0930 19:45:40.284720 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-c9zc5" Sep 30 19:45:40 crc kubenswrapper[4756]: I0930 19:45:40.460426 4756 generic.go:334] "Generic (PLEG): container finished" podID="beeaebc2-50a1-44eb-b336-d731d9658490" containerID="6595d1232f9a023b749ab03afb3d0c39194ba8ef7e206ea44c050688329e40c4" exitCode=0 Sep 30 19:45:40 crc kubenswrapper[4756]: I0930 19:45:40.460470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerDied","Data":"6595d1232f9a023b749ab03afb3d0c39194ba8ef7e206ea44c050688329e40c4"} Sep 30 19:45:41 crc kubenswrapper[4756]: I0930 19:45:41.469204 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"7d523235bdee7a4c4f51a56ac78d7b37b7dd2a5a9971b7d08feb40d33c8f1655"} Sep 30 19:45:41 crc kubenswrapper[4756]: I0930 19:45:41.469507 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"2c041ace6b45964e8aa98c5e4461a936f026d60b4fdb5587134821b49442aa90"} Sep 30 19:45:41 crc kubenswrapper[4756]: I0930 19:45:41.469518 4756 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"68f623415f113292630cae454ac289c771d6d7ad78d0f477bf3f8094ffddf84f"} Sep 30 19:45:41 crc kubenswrapper[4756]: I0930 19:45:41.469528 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"9b9218fc0635805082ea5e67a602c9a735982bfa164b8150140d89ac373a3c04"} Sep 30 19:45:42 crc kubenswrapper[4756]: I0930 19:45:42.487595 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"e186d80ff7505f8c789bd640950fb0a35cd18c569dcc2b6a6c6b7a297394f248"} Sep 30 19:45:42 crc kubenswrapper[4756]: I0930 19:45:42.488042 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-prvjv" Sep 30 19:45:42 crc kubenswrapper[4756]: I0930 19:45:42.488062 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-prvjv" event={"ID":"beeaebc2-50a1-44eb-b336-d731d9658490","Type":"ContainerStarted","Data":"7e50d995e3aba8530fe84b966da392b2f4a97d221dd1375460d12d2de43a66bd"} Sep 30 19:45:42 crc kubenswrapper[4756]: I0930 19:45:42.526489 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-prvjv" podStartSLOduration=6.884020775 podStartE2EDuration="13.526460076s" podCreationTimestamp="2025-09-30 19:45:29 +0000 UTC" firstStartedPulling="2025-09-30 19:45:30.915771677 +0000 UTC m=+860.536705154" lastFinishedPulling="2025-09-30 19:45:37.558210958 +0000 UTC m=+867.179144455" observedRunningTime="2025-09-30 19:45:42.521424485 +0000 UTC m=+872.142358022" watchObservedRunningTime="2025-09-30 19:45:42.526460076 +0000 UTC m=+872.147393613" Sep 30 19:45:45 crc kubenswrapper[4756]: I0930 19:45:45.795870 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-prvjv" Sep 30 19:45:45 crc kubenswrapper[4756]: I0930 19:45:45.870996 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-prvjv" Sep 30 19:45:50 crc kubenswrapper[4756]: I0930 19:45:50.197112 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-f2dkx" Sep 30 19:45:50 crc kubenswrapper[4756]: I0930 19:45:50.800647 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-prvjv" Sep 30 19:45:51 crc kubenswrapper[4756]: I0930 19:45:51.767321 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-kqcpz" Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.353893 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"] Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.355915 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.359535 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.373042 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"]
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.507141 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.507427 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpptq\" (UniqueName: \"kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.507483 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.609125 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpptq\" (UniqueName: \"kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.609224 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.609297 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.611743 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.614015 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.639072 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpptq\" (UniqueName: \"kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.712503 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:45:53 crc kubenswrapper[4756]: I0930 19:45:53.964437 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"]
Sep 30 19:45:53 crc kubenswrapper[4756]: W0930 19:45:53.970289 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode562b41d_d5b4_4053_86a7_90bfc0442280.slice/crio-c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414 WatchSource:0}: Error finding container c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414: Status 404 returned error can't find the container with id c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414
Sep 30 19:45:54 crc kubenswrapper[4756]: I0930 19:45:54.582849 4756 generic.go:334] "Generic (PLEG): container finished" podID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerID="6c423c03875b91119640f3235f5d8ed937ac7165c372386bb562983623932ec9" exitCode=0
Sep 30 19:45:54 crc kubenswrapper[4756]: I0930 19:45:54.582918 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc" event={"ID":"e562b41d-d5b4-4053-86a7-90bfc0442280","Type":"ContainerDied","Data":"6c423c03875b91119640f3235f5d8ed937ac7165c372386bb562983623932ec9"}
Sep 30 19:45:54 crc kubenswrapper[4756]: I0930 19:45:54.583123 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc" event={"ID":"e562b41d-d5b4-4053-86a7-90bfc0442280","Type":"ContainerStarted","Data":"c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414"}
Sep 30 19:45:58 crc kubenswrapper[4756]: I0930 19:45:58.624093 4756 generic.go:334] "Generic (PLEG): container finished" podID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerID="ae524d2b44d64ab539b9d85afbf228838c2411f1bdf474945d1550e279d07e1c" exitCode=0
Sep 30 19:45:58 crc kubenswrapper[4756]: I0930 19:45:58.624130 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc" event={"ID":"e562b41d-d5b4-4053-86a7-90bfc0442280","Type":"ContainerDied","Data":"ae524d2b44d64ab539b9d85afbf228838c2411f1bdf474945d1550e279d07e1c"}
Sep 30 19:45:59 crc kubenswrapper[4756]: I0930 19:45:59.635687 4756 generic.go:334] "Generic (PLEG): container finished" podID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerID="59a1ba6b9c23bd676e7ed1d7aeed9e28185ee378e469583ce315e36f736bf1a7" exitCode=0
Sep 30 19:45:59 crc kubenswrapper[4756]: I0930 19:45:59.636031 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc" event={"ID":"e562b41d-d5b4-4053-86a7-90bfc0442280","Type":"ContainerDied","Data":"59a1ba6b9c23bd676e7ed1d7aeed9e28185ee378e469583ce315e36f736bf1a7"}
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.020684 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.221269 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle\") pod \"e562b41d-d5b4-4053-86a7-90bfc0442280\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") "
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.221329 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpptq\" (UniqueName: \"kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq\") pod \"e562b41d-d5b4-4053-86a7-90bfc0442280\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") "
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.221474 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util\") pod \"e562b41d-d5b4-4053-86a7-90bfc0442280\" (UID: \"e562b41d-d5b4-4053-86a7-90bfc0442280\") "
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.223528 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle" (OuterVolumeSpecName: "bundle") pod "e562b41d-d5b4-4053-86a7-90bfc0442280" (UID: "e562b41d-d5b4-4053-86a7-90bfc0442280"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.231659 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq" (OuterVolumeSpecName: "kube-api-access-tpptq") pod "e562b41d-d5b4-4053-86a7-90bfc0442280" (UID: "e562b41d-d5b4-4053-86a7-90bfc0442280"). InnerVolumeSpecName "kube-api-access-tpptq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.238731 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util" (OuterVolumeSpecName: "util") pod "e562b41d-d5b4-4053-86a7-90bfc0442280" (UID: "e562b41d-d5b4-4053-86a7-90bfc0442280"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.322811 4756 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-util\") on node \"crc\" DevicePath \"\""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.322840 4756 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e562b41d-d5b4-4053-86a7-90bfc0442280-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.322850 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpptq\" (UniqueName: \"kubernetes.io/projected/e562b41d-d5b4-4053-86a7-90bfc0442280-kube-api-access-tpptq\") on node \"crc\" DevicePath \"\""
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.654318 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc" event={"ID":"e562b41d-d5b4-4053-86a7-90bfc0442280","Type":"ContainerDied","Data":"c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414"}
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.654374 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7cf441c06590a54f5d3018a59196ec3b03e1aea5faaa3c4425631ed1367d414"
Sep 30 19:46:01 crc kubenswrapper[4756]: I0930 19:46:01.654390 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.173842 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf"]
Sep 30 19:46:06 crc kubenswrapper[4756]: E0930 19:46:06.174677 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="pull"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.174692 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="pull"
Sep 30 19:46:06 crc kubenswrapper[4756]: E0930 19:46:06.174702 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="extract"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.174710 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="extract"
Sep 30 19:46:06 crc kubenswrapper[4756]: E0930 19:46:06.174722 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="util"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.174731 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="util"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.174876 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e562b41d-d5b4-4053-86a7-90bfc0442280" containerName="extract"
Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.175325 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf"
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.179236 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.179365 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.179708 4756 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-hvpcc" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.199621 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf"] Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.313770 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8jgt\" (UniqueName: \"kubernetes.io/projected/78ff7c36-bfe9-4a98-9c79-86a00408740e-kube-api-access-t8jgt\") pod \"cert-manager-operator-controller-manager-57cd46d6d-9gnnf\" (UID: \"78ff7c36-bfe9-4a98-9c79-86a00408740e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.414658 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8jgt\" (UniqueName: \"kubernetes.io/projected/78ff7c36-bfe9-4a98-9c79-86a00408740e-kube-api-access-t8jgt\") pod \"cert-manager-operator-controller-manager-57cd46d6d-9gnnf\" (UID: \"78ff7c36-bfe9-4a98-9c79-86a00408740e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.434111 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8jgt\" (UniqueName: \"kubernetes.io/projected/78ff7c36-bfe9-4a98-9c79-86a00408740e-kube-api-access-t8jgt\") pod \"cert-manager-operator-controller-manager-57cd46d6d-9gnnf\" (UID: \"78ff7c36-bfe9-4a98-9c79-86a00408740e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" Sep 30 19:46:06 crc kubenswrapper[4756]: I0930 19:46:06.494236 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" Sep 30 19:46:07 crc kubenswrapper[4756]: I0930 19:46:07.004981 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf"] Sep 30 19:46:07 crc kubenswrapper[4756]: W0930 19:46:07.011104 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78ff7c36_bfe9_4a98_9c79_86a00408740e.slice/crio-2a865fca5efad086fb237a1565cf86c99de646b7febf12ebfb3d96b94dd32c40 WatchSource:0}: Error finding container 2a865fca5efad086fb237a1565cf86c99de646b7febf12ebfb3d96b94dd32c40: Status 404 returned error can't find the container with id 2a865fca5efad086fb237a1565cf86c99de646b7febf12ebfb3d96b94dd32c40 Sep 30 19:46:07 crc kubenswrapper[4756]: I0930 19:46:07.692311 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" event={"ID":"78ff7c36-bfe9-4a98-9c79-86a00408740e","Type":"ContainerStarted","Data":"2a865fca5efad086fb237a1565cf86c99de646b7febf12ebfb3d96b94dd32c40"} Sep 30 19:46:11 crc kubenswrapper[4756]: I0930 19:46:11.539467 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:46:11 crc kubenswrapper[4756]: I0930 19:46:11.540384 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:46:13 crc kubenswrapper[4756]: I0930 19:46:13.727458 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" event={"ID":"78ff7c36-bfe9-4a98-9c79-86a00408740e","Type":"ContainerStarted","Data":"984af13db7834b3a19ff670ee8547166285a714dde6b6ba64e392f6ce0b491fb"} Sep 30 19:46:13 crc kubenswrapper[4756]: I0930 19:46:13.747761 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-9gnnf" podStartSLOduration=1.572200857 podStartE2EDuration="7.747743828s" podCreationTimestamp="2025-09-30 19:46:06 +0000 UTC" firstStartedPulling="2025-09-30 19:46:07.012997853 +0000 UTC m=+896.633931330" lastFinishedPulling="2025-09-30 19:46:13.188540824 +0000 UTC m=+902.809474301" observedRunningTime="2025-09-30 19:46:13.744286108 +0000 UTC m=+903.365219605" watchObservedRunningTime="2025-09-30 19:46:13.747743828 +0000 UTC m=+903.368677305" Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.962641 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-gk77c"] Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.963530 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.965161 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.965217 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.965531 4756 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lnc99" Sep 30 19:46:16 crc kubenswrapper[4756]: I0930 19:46:16.972823 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-gk77c"] Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.118167 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg7hz\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-kube-api-access-dg7hz\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.118499 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-bound-sa-token\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.220578 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg7hz\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-kube-api-access-dg7hz\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.220748 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-bound-sa-token\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.252353 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg7hz\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-kube-api-access-dg7hz\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.253825 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01b70269-7116-446c-b3d7-f0ae3ff0a1c4-bound-sa-token\") pod \"cert-manager-webhook-d969966f-gk77c\" (UID: \"01b70269-7116-446c-b3d7-f0ae3ff0a1c4\") " pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.281158 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.428193 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh"] Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.429384 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.431315 4756 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-dwtxp" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.435715 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh"] Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.626268 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.626449 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdfdw\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-kube-api-access-fdfdw\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.727496 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.727627 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdfdw\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-kube-api-access-fdfdw\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.752653 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-gk77c"] Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.754883 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:17 crc kubenswrapper[4756]: I0930 19:46:17.776491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdfdw\" (UniqueName: \"kubernetes.io/projected/6d4a1d50-a63f-4a46-a545-4af49e08fc6a-kube-api-access-fdfdw\") pod \"cert-manager-cainjector-7d9f95dbf-xnqxh\" (UID: \"6d4a1d50-a63f-4a46-a545-4af49e08fc6a\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 
19:46:18 crc kubenswrapper[4756]: I0930 19:46:18.055271 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" Sep 30 19:46:18 crc kubenswrapper[4756]: I0930 19:46:18.318561 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh"] Sep 30 19:46:18 crc kubenswrapper[4756]: W0930 19:46:18.326946 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d4a1d50_a63f_4a46_a545_4af49e08fc6a.slice/crio-4440d79aeb383b9b599388c3ba064b765a4c22325d7435c3edeedb49e309e677 WatchSource:0}: Error finding container 4440d79aeb383b9b599388c3ba064b765a4c22325d7435c3edeedb49e309e677: Status 404 returned error can't find the container with id 4440d79aeb383b9b599388c3ba064b765a4c22325d7435c3edeedb49e309e677 Sep 30 19:46:18 crc kubenswrapper[4756]: I0930 19:46:18.764508 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" event={"ID":"01b70269-7116-446c-b3d7-f0ae3ff0a1c4","Type":"ContainerStarted","Data":"7aa46a6265e6d4456eacff6bc494bbe7bbfabc5d489800b13c4bec0718c82331"} Sep 30 19:46:18 crc kubenswrapper[4756]: I0930 19:46:18.766743 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" event={"ID":"6d4a1d50-a63f-4a46-a545-4af49e08fc6a","Type":"ContainerStarted","Data":"4440d79aeb383b9b599388c3ba064b765a4c22325d7435c3edeedb49e309e677"} Sep 30 19:46:22 crc kubenswrapper[4756]: I0930 19:46:22.790454 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" event={"ID":"01b70269-7116-446c-b3d7-f0ae3ff0a1c4","Type":"ContainerStarted","Data":"813c2b77f70ecb479d6c2c38bbe390174ff468faafa9f484a0f54451e5c447e2"} Sep 30 19:46:22 crc kubenswrapper[4756]: I0930 19:46:22.790970 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:22 crc kubenswrapper[4756]: I0930 19:46:22.792316 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" event={"ID":"6d4a1d50-a63f-4a46-a545-4af49e08fc6a","Type":"ContainerStarted","Data":"fcee7fa22b937b7bbd61bf0158b88a148ba32231e3b469cef6d712eeff20edf1"} Sep 30 19:46:22 crc kubenswrapper[4756]: I0930 19:46:22.823738 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" podStartSLOduration=2.392091378 podStartE2EDuration="6.823721592s" podCreationTimestamp="2025-09-30 19:46:16 +0000 UTC" firstStartedPulling="2025-09-30 19:46:17.751912319 +0000 UTC m=+907.372845836" lastFinishedPulling="2025-09-30 19:46:22.183542573 +0000 UTC m=+911.804476050" observedRunningTime="2025-09-30 19:46:22.819863851 +0000 UTC m=+912.440797328" watchObservedRunningTime="2025-09-30 19:46:22.823721592 +0000 UTC m=+912.444655089" Sep 30 19:46:22 crc kubenswrapper[4756]: I0930 19:46:22.840521 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xnqxh" podStartSLOduration=2.002354847 podStartE2EDuration="5.840499032s" podCreationTimestamp="2025-09-30 19:46:17 +0000 UTC" firstStartedPulling="2025-09-30 19:46:18.329244524 +0000 UTC m=+907.950178031" lastFinishedPulling="2025-09-30 19:46:22.167388739 +0000 UTC m=+911.788322216" observedRunningTime="2025-09-30 
19:46:22.836555439 +0000 UTC m=+912.457488916" watchObservedRunningTime="2025-09-30 19:46:22.840499032 +0000 UTC m=+912.461432519" Sep 30 19:46:27 crc kubenswrapper[4756]: I0930 19:46:27.283886 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-gk77c" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.190175 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-c2pg9"] Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.191919 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.195839 4756 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-fwlcl" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.208661 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-c2pg9"] Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.321003 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9wgm\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-kube-api-access-n9wgm\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.321155 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.422758 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9wgm\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-kube-api-access-n9wgm\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.422976 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.448795 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.449703 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9wgm\" (UniqueName: \"kubernetes.io/projected/093e0d0f-7fdc-49ca-ba01-928bf465b0a8-kube-api-access-n9wgm\") pod \"cert-manager-7d4cc89fcb-c2pg9\" (UID: \"093e0d0f-7fdc-49ca-ba01-928bf465b0a8\") " pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.513798 4756 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" Sep 30 19:46:36 crc kubenswrapper[4756]: I0930 19:46:36.970571 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-c2pg9"] Sep 30 19:46:37 crc kubenswrapper[4756]: I0930 19:46:37.900601 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" event={"ID":"093e0d0f-7fdc-49ca-ba01-928bf465b0a8","Type":"ContainerStarted","Data":"4384ae359715411048a99d3c278488fe827c175b3b806bd32b6d3bb6c361c48d"} Sep 30 19:46:37 crc kubenswrapper[4756]: I0930 19:46:37.900871 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" event={"ID":"093e0d0f-7fdc-49ca-ba01-928bf465b0a8","Type":"ContainerStarted","Data":"3ee2da558ff46a80b10dc3597d0c3ed9cd9084179ba4db5ba7157f5f7bbb393f"} Sep 30 19:46:37 crc kubenswrapper[4756]: I0930 19:46:37.926268 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-c2pg9" podStartSLOduration=1.926238964 podStartE2EDuration="1.926238964s" podCreationTimestamp="2025-09-30 19:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:46:37.919527628 +0000 UTC m=+927.540461105" watchObservedRunningTime="2025-09-30 19:46:37.926238964 +0000 UTC m=+927.547172451" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.538465 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.539187 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.600224 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.600949 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.602987 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.604467 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-j5zhn" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.621087 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.631612 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.710826 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jct4\" (UniqueName: \"kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4\") pod \"openstack-operator-index-qz8pr\" (UID: \"447dcb99-6531-4ad4-b11c-7bd3d1f849f6\") " pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.812620 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jct4\" (UniqueName: \"kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4\") pod \"openstack-operator-index-qz8pr\" (UID: \"447dcb99-6531-4ad4-b11c-7bd3d1f849f6\") " pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.829784 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jct4\" (UniqueName: \"kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4\") pod \"openstack-operator-index-qz8pr\" (UID: \"447dcb99-6531-4ad4-b11c-7bd3d1f849f6\") " pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:41 crc kubenswrapper[4756]: I0930 19:46:41.926994 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:42 crc kubenswrapper[4756]: I0930 19:46:42.229927 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:42 crc kubenswrapper[4756]: W0930 19:46:42.237714 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod447dcb99_6531_4ad4_b11c_7bd3d1f849f6.slice/crio-acf41898e328891e95009c744befae86c3fad27b6eb63ea6bad6d10302f3ea79 WatchSource:0}: Error finding container acf41898e328891e95009c744befae86c3fad27b6eb63ea6bad6d10302f3ea79: Status 404 returned error can't find the container with id acf41898e328891e95009c744befae86c3fad27b6eb63ea6bad6d10302f3ea79 Sep 30 19:46:42 crc kubenswrapper[4756]: I0930 19:46:42.939561 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qz8pr" event={"ID":"447dcb99-6531-4ad4-b11c-7bd3d1f849f6","Type":"ContainerStarted","Data":"acf41898e328891e95009c744befae86c3fad27b6eb63ea6bad6d10302f3ea79"} Sep 30 19:46:43 crc kubenswrapper[4756]: I0930 19:46:43.771582 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.178880 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-c57rv"] Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.180363 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.185530 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c57rv"] Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.353753 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd6pr\" (UniqueName: \"kubernetes.io/projected/32314f62-22f9-45ae-9434-8345b30e8761-kube-api-access-qd6pr\") pod \"openstack-operator-index-c57rv\" (UID: \"32314f62-22f9-45ae-9434-8345b30e8761\") " pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.455439 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd6pr\" (UniqueName: \"kubernetes.io/projected/32314f62-22f9-45ae-9434-8345b30e8761-kube-api-access-qd6pr\") pod \"openstack-operator-index-c57rv\" (UID: \"32314f62-22f9-45ae-9434-8345b30e8761\") " pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.473984 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd6pr\" (UniqueName: \"kubernetes.io/projected/32314f62-22f9-45ae-9434-8345b30e8761-kube-api-access-qd6pr\") pod \"openstack-operator-index-c57rv\" (UID: \"32314f62-22f9-45ae-9434-8345b30e8761\") " pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.503423 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:44 crc kubenswrapper[4756]: I0930 19:46:44.960851 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c57rv"] Sep 30 19:46:44 crc kubenswrapper[4756]: W0930 19:46:44.970458 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32314f62_22f9_45ae_9434_8345b30e8761.slice/crio-a20302f93aa460d2e4e73402512e9d08600b7ed55e13d3c091db7a9a33662694 WatchSource:0}: Error finding container a20302f93aa460d2e4e73402512e9d08600b7ed55e13d3c091db7a9a33662694: Status 404 returned error can't find the container with id a20302f93aa460d2e4e73402512e9d08600b7ed55e13d3c091db7a9a33662694 Sep 30 19:46:45 crc kubenswrapper[4756]: I0930 19:46:45.959327 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c57rv" event={"ID":"32314f62-22f9-45ae-9434-8345b30e8761","Type":"ContainerStarted","Data":"a20302f93aa460d2e4e73402512e9d08600b7ed55e13d3c091db7a9a33662694"} Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.004071 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qz8pr" event={"ID":"447dcb99-6531-4ad4-b11c-7bd3d1f849f6","Type":"ContainerStarted","Data":"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb"} Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.004182 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-qz8pr" podUID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" containerName="registry-server" containerID="cri-o://9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb" gracePeriod=2 Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.008802 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c57rv" event={"ID":"32314f62-22f9-45ae-9434-8345b30e8761","Type":"ContainerStarted","Data":"9cf65c415022bed3785d679fa34ba5cb8e8377bdaa6181b076f796f0ddb7e218"} Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.032238 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qz8pr" podStartSLOduration=2.218300916 podStartE2EDuration="8.032208511s" podCreationTimestamp="2025-09-30 19:46:41 +0000 UTC" firstStartedPulling="2025-09-30 19:46:42.240180338 +0000 UTC m=+931.861113835" lastFinishedPulling="2025-09-30 19:46:48.054087913 +0000 UTC m=+937.675021430" observedRunningTime="2025-09-30 19:46:49.029260434 +0000 UTC m=+938.650193911" watchObservedRunningTime="2025-09-30 19:46:49.032208511 +0000 UTC m=+938.653142028" Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.052858 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-c57rv" podStartSLOduration=1.945378619 podStartE2EDuration="5.052833682s" podCreationTimestamp="2025-09-30 19:46:44 +0000 UTC" firstStartedPulling="2025-09-30 19:46:44.973119895 +0000 UTC m=+934.594053372" lastFinishedPulling="2025-09-30 19:46:48.080574958 +0000 UTC m=+937.701508435" observedRunningTime="2025-09-30 19:46:49.046236619 +0000 UTC m=+938.667170136" watchObservedRunningTime="2025-09-30 19:46:49.052833682 +0000 UTC m=+938.673767169" Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.445225 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.638927 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jct4\" (UniqueName: \"kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4\") pod \"447dcb99-6531-4ad4-b11c-7bd3d1f849f6\" (UID: \"447dcb99-6531-4ad4-b11c-7bd3d1f849f6\") " Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.675608 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4" (OuterVolumeSpecName: "kube-api-access-9jct4") pod "447dcb99-6531-4ad4-b11c-7bd3d1f849f6" (UID: "447dcb99-6531-4ad4-b11c-7bd3d1f849f6"). InnerVolumeSpecName "kube-api-access-9jct4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:49 crc kubenswrapper[4756]: I0930 19:46:49.740779 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jct4\" (UniqueName: \"kubernetes.io/projected/447dcb99-6531-4ad4-b11c-7bd3d1f849f6-kube-api-access-9jct4\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.019503 4756 generic.go:334] "Generic (PLEG): container finished" podID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" containerID="9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb" exitCode=0 Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.019619 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qz8pr" Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.019588 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qz8pr" event={"ID":"447dcb99-6531-4ad4-b11c-7bd3d1f849f6","Type":"ContainerDied","Data":"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb"} Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.019696 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qz8pr" event={"ID":"447dcb99-6531-4ad4-b11c-7bd3d1f849f6","Type":"ContainerDied","Data":"acf41898e328891e95009c744befae86c3fad27b6eb63ea6bad6d10302f3ea79"} Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.019759 4756 scope.go:117] "RemoveContainer" containerID="9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb" Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.052508 4756 scope.go:117] "RemoveContainer" containerID="9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb" Sep 30 19:46:50 crc kubenswrapper[4756]: E0930 19:46:50.054277 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb\": container with ID starting with 9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb not found: ID does not exist" containerID="9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb" Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.054386 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb"} err="failed to get container status \"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb\": rpc error: code = NotFound desc = could not find container 
\"9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb\": container with ID starting with 9f098957d0cdfd644ea387e9d5ded83175fd2e1e1a50bcc1dadcb680956622bb not found: ID does not exist" Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.078700 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:50 crc kubenswrapper[4756]: I0930 19:46:50.085253 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-qz8pr"] Sep 30 19:46:51 crc kubenswrapper[4756]: I0930 19:46:51.129423 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" path="/var/lib/kubelet/pods/447dcb99-6531-4ad4-b11c-7bd3d1f849f6/volumes" Sep 30 19:46:54 crc kubenswrapper[4756]: I0930 19:46:54.504038 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:54 crc kubenswrapper[4756]: I0930 19:46:54.504618 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:54 crc kubenswrapper[4756]: I0930 19:46:54.532112 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:46:55 crc kubenswrapper[4756]: I0930 19:46:55.098355 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-c57rv" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.620165 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf"] Sep 30 19:47:00 crc kubenswrapper[4756]: E0930 19:47:00.620745 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" containerName="registry-server" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.620759 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" containerName="registry-server" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.620856 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="447dcb99-6531-4ad4-b11c-7bd3d1f849f6" containerName="registry-server" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.621645 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.628018 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8lxkb" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.635440 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf"] Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.650560 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqwfn\" (UniqueName: \"kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.650663 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.651009 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.752338 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqwfn\" (UniqueName: \"kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.752477 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.752545 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.753076 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.753509 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.778183 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqwfn\" (UniqueName: \"kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn\") pod \"53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:00 crc kubenswrapper[4756]: I0930 19:47:00.948468 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:01 crc kubenswrapper[4756]: I0930 19:47:01.228895 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf"] Sep 30 19:47:01 crc kubenswrapper[4756]: W0930 19:47:01.237058 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82fec36a_fd50_4bc6_ab33_1ce3e87bf1d7.slice/crio-5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35 WatchSource:0}: Error finding container 5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35: Status 404 returned error can't find the container with id 5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35 Sep 30 19:47:02 crc kubenswrapper[4756]: I0930 19:47:02.114719 4756 generic.go:334] "Generic (PLEG): container finished" podID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerID="f107279eed23f36995ea19d72d006099905912feb1dc7a325a9fbaf761a2a6f1" exitCode=0 Sep 30 19:47:02 crc kubenswrapper[4756]: I0930 19:47:02.114808 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" event={"ID":"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7","Type":"ContainerDied","Data":"f107279eed23f36995ea19d72d006099905912feb1dc7a325a9fbaf761a2a6f1"} Sep 30 19:47:02 crc kubenswrapper[4756]: I0930 19:47:02.114863 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" event={"ID":"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7","Type":"ContainerStarted","Data":"5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35"} Sep 30 19:47:03 crc kubenswrapper[4756]: I0930 19:47:03.129150 4756 generic.go:334] "Generic (PLEG): container finished" podID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerID="254962b7ea74fb63cfb5fa43142e97918057f3ef45eea7020105427871967c6e" exitCode=0 Sep 30 19:47:03 crc kubenswrapper[4756]: I0930 19:47:03.133069 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" event={"ID":"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7","Type":"ContainerDied","Data":"254962b7ea74fb63cfb5fa43142e97918057f3ef45eea7020105427871967c6e"} Sep 30 19:47:04 crc kubenswrapper[4756]: I0930 19:47:04.137990 4756 generic.go:334] "Generic (PLEG): container finished" podID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerID="80627ed51d0cd88bcde4e7201aebe4512f0a29626e450700028080c2242435cc" exitCode=0 Sep 30 19:47:04 crc kubenswrapper[4756]: I0930 19:47:04.138098 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" event={"ID":"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7","Type":"ContainerDied","Data":"80627ed51d0cd88bcde4e7201aebe4512f0a29626e450700028080c2242435cc"} Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.387538 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.428531 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util\") pod \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.448543 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util" (OuterVolumeSpecName: "util") pod "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" (UID: "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.529253 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqwfn\" (UniqueName: \"kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn\") pod \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.529356 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle\") pod \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\" (UID: \"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7\") " Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.529520 4756 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-util\") on node \"crc\" DevicePath \"\"" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.530010 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle" (OuterVolumeSpecName: "bundle") pod "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" (UID: "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.534538 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn" (OuterVolumeSpecName: "kube-api-access-dqwfn") pod "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" (UID: "82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7"). InnerVolumeSpecName "kube-api-access-dqwfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.630611 4756 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:47:05 crc kubenswrapper[4756]: I0930 19:47:05.630663 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqwfn\" (UniqueName: \"kubernetes.io/projected/82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7-kube-api-access-dqwfn\") on node \"crc\" DevicePath \"\"" Sep 30 19:47:06 crc kubenswrapper[4756]: I0930 19:47:06.157273 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" event={"ID":"82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7","Type":"ContainerDied","Data":"5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35"} Sep 30 19:47:06 crc kubenswrapper[4756]: I0930 19:47:06.157334 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d3ae78ba8826f615a5017d550b7fb74d996e251605cb3f42e113341c7c02b35" Sep 30 19:47:06 crc kubenswrapper[4756]: I0930 19:47:06.157473 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf" Sep 30 19:47:11 crc kubenswrapper[4756]: I0930 19:47:11.538649 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:47:11 crc kubenswrapper[4756]: I0930 19:47:11.538978 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:47:11 crc kubenswrapper[4756]: I0930 19:47:11.539024 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:47:11 crc kubenswrapper[4756]: I0930 19:47:11.539595 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:47:11 crc kubenswrapper[4756]: I0930 19:47:11.539645 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" 
containerID="cri-o://2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2" gracePeriod=600 Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.200341 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2" exitCode=0 Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.200383 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2"} Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.200836 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c"} Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.200860 4756 scope.go:117] "RemoveContainer" containerID="2979d187e57e810a5b2b095948919aef457acc38ed2bbb2147029cc6d902e94d" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.387910 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn"] Sep 30 19:47:12 crc kubenswrapper[4756]: E0930 19:47:12.388157 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="extract" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.388171 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="extract" Sep 30 19:47:12 crc kubenswrapper[4756]: E0930 19:47:12.388200 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="util" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.388209 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="util" Sep 30 19:47:12 crc kubenswrapper[4756]: E0930 19:47:12.388228 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="pull" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.388237 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="pull" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.388382 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7" containerName="extract" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.389093 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.391313 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-z6wgq" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.409005 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn"] Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.527549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpwqb\" (UniqueName: \"kubernetes.io/projected/27ec2c4e-9af3-46a8-a478-178875319a55-kube-api-access-fpwqb\") pod \"openstack-operator-controller-operator-67dd46bc9f-7pbdn\" (UID: \"27ec2c4e-9af3-46a8-a478-178875319a55\") " pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.629222 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpwqb\" (UniqueName: \"kubernetes.io/projected/27ec2c4e-9af3-46a8-a478-178875319a55-kube-api-access-fpwqb\") pod \"openstack-operator-controller-operator-67dd46bc9f-7pbdn\" (UID: \"27ec2c4e-9af3-46a8-a478-178875319a55\") " pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.672291 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpwqb\" (UniqueName: \"kubernetes.io/projected/27ec2c4e-9af3-46a8-a478-178875319a55-kube-api-access-fpwqb\") pod \"openstack-operator-controller-operator-67dd46bc9f-7pbdn\" (UID: \"27ec2c4e-9af3-46a8-a478-178875319a55\") " pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:12 crc kubenswrapper[4756]: I0930 19:47:12.708618 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:13 crc kubenswrapper[4756]: I0930 19:47:13.157129 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn"] Sep 30 19:47:13 crc kubenswrapper[4756]: I0930 19:47:13.216553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" event={"ID":"27ec2c4e-9af3-46a8-a478-178875319a55","Type":"ContainerStarted","Data":"4e39b5e1ff42bf1e06e03996571fb57202d94569d2d0c5f022b202c3da9f0f52"} Sep 30 19:47:17 crc kubenswrapper[4756]: I0930 19:47:17.247479 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" event={"ID":"27ec2c4e-9af3-46a8-a478-178875319a55","Type":"ContainerStarted","Data":"4640ac6cc990c5ae32daeffda65eb34cc6ffb3b02b8e140cc09f30d68c6d77c0"} Sep 30 19:47:19 crc kubenswrapper[4756]: I0930 19:47:19.263443 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" event={"ID":"27ec2c4e-9af3-46a8-a478-178875319a55","Type":"ContainerStarted","Data":"f79e7422ead1e59abfabfc2cf2ef0323dc1339796e4c0f185669230d86d403f9"} Sep 30 19:47:19 crc kubenswrapper[4756]: I0930 19:47:19.263837 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:19 crc kubenswrapper[4756]: I0930 19:47:19.301928 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" podStartSLOduration=1.385989893 podStartE2EDuration="7.301912995s" podCreationTimestamp="2025-09-30 19:47:12 +0000 UTC" firstStartedPulling="2025-09-30 19:47:13.167950912 +0000 UTC m=+962.788884389" lastFinishedPulling="2025-09-30 19:47:19.083874014 +0000 UTC m=+968.704807491" observedRunningTime="2025-09-30 19:47:19.295673492 +0000 UTC m=+968.916606979" watchObservedRunningTime="2025-09-30 19:47:19.301912995 +0000 UTC m=+968.922846472" Sep 30 19:47:22 crc kubenswrapper[4756]: I0930 19:47:22.711282 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-67dd46bc9f-7pbdn" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.078550 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.080029 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.082030 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-hpmm8" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.102530 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.104132 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.105381 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.106758 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.109442 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.110082 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-5fnxh" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.111141 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-bkrpl" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.117457 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.138139 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.148791 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.150139 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.153759 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-x8h8j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.168951 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.196409 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.197427 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.199475 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-5khwg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.228507 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.229514 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.231481 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-7kf88" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.237616 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.244256 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.253208 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.254539 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.260306 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6t4v\" (UniqueName: \"kubernetes.io/projected/31f39b27-7fcc-453c-8ffd-3c5d7b12aed2-kube-api-access-g6t4v\") pod \"barbican-operator-controller-manager-6ff8b75857-7cv4w\" (UID: \"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.260383 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx9js\" (UniqueName: \"kubernetes.io/projected/7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3-kube-api-access-kx9js\") pod \"designate-operator-controller-manager-84f4f7b77b-ldpdd\" (UID: \"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.260435 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txzxj\" (UniqueName: \"kubernetes.io/projected/7871ae32-4b8d-4902-af0f-152e38693ee5-kube-api-access-txzxj\") pod \"cinder-operator-controller-manager-644bddb6d8-klbxz\" (UID: \"7871ae32-4b8d-4902-af0f-152e38693ee5\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.260457 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s44c\" (UniqueName: \"kubernetes.io/projected/a5234caa-5273-4074-837e-30ee316c5d73-kube-api-access-4s44c\") pod \"glance-operator-controller-manager-84958c4d49-xg9fd\" (UID: 
\"a5234caa-5273-4074-837e-30ee316c5d73\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.261438 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.261593 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.261671 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-2xpdn" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.262779 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.269185 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-5s2bl" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.271439 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.298277 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.326495 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.327524 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.332918 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-z646n" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.340288 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.341350 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.343354 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-mtf8k" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.356679 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.357667 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.360666 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-2djnd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361644 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrnwt\" (UniqueName: \"kubernetes.io/projected/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-kube-api-access-zrnwt\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361670 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r5wp\" (UniqueName: \"kubernetes.io/projected/18189d34-2947-4744-b2ca-e970a456289f-kube-api-access-6r5wp\") pod \"horizon-operator-controller-manager-9f4696d94-6rqqt\" (UID: \"18189d34-2947-4744-b2ca-e970a456289f\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361705 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6t4v\" (UniqueName: \"kubernetes.io/projected/31f39b27-7fcc-453c-8ffd-3c5d7b12aed2-kube-api-access-g6t4v\") pod \"barbican-operator-controller-manager-6ff8b75857-7cv4w\" (UID: \"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361735 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361753 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsckp\" (UniqueName: \"kubernetes.io/projected/869ee50e-ceab-400c-a7b1-b76b81b29bbc-kube-api-access-fsckp\") pod \"heat-operator-controller-manager-5d889d78cf-gz6p9\" (UID: \"869ee50e-ceab-400c-a7b1-b76b81b29bbc\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361777 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx9js\" (UniqueName: \"kubernetes.io/projected/7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3-kube-api-access-kx9js\") pod \"designate-operator-controller-manager-84f4f7b77b-ldpdd\" (UID: \"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361807 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txzxj\" (UniqueName: \"kubernetes.io/projected/7871ae32-4b8d-4902-af0f-152e38693ee5-kube-api-access-txzxj\") pod \"cinder-operator-controller-manager-644bddb6d8-klbxz\" (UID: \"7871ae32-4b8d-4902-af0f-152e38693ee5\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" 
Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.361837 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s44c\" (UniqueName: \"kubernetes.io/projected/a5234caa-5273-4074-837e-30ee316c5d73-kube-api-access-4s44c\") pod \"glance-operator-controller-manager-84958c4d49-xg9fd\" (UID: \"a5234caa-5273-4074-837e-30ee316c5d73\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.369150 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.379456 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.405976 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.408550 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s44c\" (UniqueName: \"kubernetes.io/projected/a5234caa-5273-4074-837e-30ee316c5d73-kube-api-access-4s44c\") pod \"glance-operator-controller-manager-84958c4d49-xg9fd\" (UID: \"a5234caa-5273-4074-837e-30ee316c5d73\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.408700 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx9js\" (UniqueName: \"kubernetes.io/projected/7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3-kube-api-access-kx9js\") pod \"designate-operator-controller-manager-84f4f7b77b-ldpdd\" (UID: \"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.410943 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.414020 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.416856 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-rt98r" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.428458 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.430725 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6t4v\" (UniqueName: \"kubernetes.io/projected/31f39b27-7fcc-453c-8ffd-3c5d7b12aed2-kube-api-access-g6t4v\") pod \"barbican-operator-controller-manager-6ff8b75857-7cv4w\" (UID: \"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.431128 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.431415 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txzxj\" (UniqueName: \"kubernetes.io/projected/7871ae32-4b8d-4902-af0f-152e38693ee5-kube-api-access-txzxj\") pod \"cinder-operator-controller-manager-644bddb6d8-klbxz\" (UID: \"7871ae32-4b8d-4902-af0f-152e38693ee5\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.432217 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.434907 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-fkhzb" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.437459 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.438814 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.443898 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-z86l6" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.449720 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.454111 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.456309 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.466891 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtmps\" (UniqueName: \"kubernetes.io/projected/2c4a76fe-4543-43e7-9e7e-7e3b4c149e46-kube-api-access-qtmps\") pod \"ironic-operator-controller-manager-5cd4858477-k5ccf\" (UID: \"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.466946 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9xv7\" (UniqueName: \"kubernetes.io/projected/9ffa6269-c199-4388-93a5-cdb131bf656d-kube-api-access-f9xv7\") pod \"manila-operator-controller-manager-6d68dbc695-54d4j\" (UID: \"9ffa6269-c199-4388-93a5-cdb131bf656d\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.466977 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467002 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsckp\" (UniqueName: \"kubernetes.io/projected/869ee50e-ceab-400c-a7b1-b76b81b29bbc-kube-api-access-fsckp\") pod \"heat-operator-controller-manager-5d889d78cf-gz6p9\" (UID: \"869ee50e-ceab-400c-a7b1-b76b81b29bbc\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467024 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6vl6\" (UniqueName: \"kubernetes.io/projected/3dc3c221-bd19-4634-8979-2c5f843c9b06-kube-api-access-k6vl6\") pod \"nova-operator-controller-manager-64cd67b5cb-m2fvs\" (UID: \"3dc3c221-bd19-4634-8979-2c5f843c9b06\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467076 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5pfb\" (UniqueName: \"kubernetes.io/projected/081b46ad-0352-4dbf-8fde-1e064cad9b24-kube-api-access-r5pfb\") pod \"neutron-operator-controller-manager-849d5b9b84-b7v6v\" (UID: \"081b46ad-0352-4dbf-8fde-1e064cad9b24\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467119 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrnwt\" (UniqueName: \"kubernetes.io/projected/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-kube-api-access-zrnwt\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467150 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r5wp\" (UniqueName: 
\"kubernetes.io/projected/18189d34-2947-4744-b2ca-e970a456289f-kube-api-access-6r5wp\") pod \"horizon-operator-controller-manager-9f4696d94-6rqqt\" (UID: \"18189d34-2947-4744-b2ca-e970a456289f\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467178 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92wjm\" (UniqueName: \"kubernetes.io/projected/e42a9d09-d1ac-4d6c-853a-bc9f472e2b91-kube-api-access-92wjm\") pod \"mariadb-operator-controller-manager-88c7-h8tt7\" (UID: \"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.467202 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjqs2\" (UniqueName: \"kubernetes.io/projected/511b7524-d136-4131-9949-72c4f46a60d8-kube-api-access-rjqs2\") pod \"keystone-operator-controller-manager-5bd55b4bff-c46nt\" (UID: \"511b7524-d136-4131-9949-72c4f46a60d8\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:37 crc kubenswrapper[4756]: E0930 19:47:37.467455 4756 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 19:47:37 crc kubenswrapper[4756]: E0930 19:47:37.467503 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert podName:a76989a9-9056-4bc1-9e9c-5d7513a3e81b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:37.967487596 +0000 UTC m=+987.588421073 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert") pod "infra-operator-controller-manager-9d6c5db85-r8xqm" (UID: "a76989a9-9056-4bc1-9e9c-5d7513a3e81b") : secret "infra-operator-webhook-server-cert" not found Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.471081 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.472383 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.474752 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.474981 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-sttmf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.485879 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.486688 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrnwt\" (UniqueName: \"kubernetes.io/projected/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-kube-api-access-zrnwt\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.488540 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.490456 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.491305 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.491680 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r5wp\" (UniqueName: \"kubernetes.io/projected/18189d34-2947-4744-b2ca-e970a456289f-kube-api-access-6r5wp\") pod \"horizon-operator-controller-manager-9f4696d94-6rqqt\" (UID: \"18189d34-2947-4744-b2ca-e970a456289f\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.492160 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-xbqkg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.496666 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsckp\" (UniqueName: \"kubernetes.io/projected/869ee50e-ceab-400c-a7b1-b76b81b29bbc-kube-api-access-fsckp\") pod \"heat-operator-controller-manager-5d889d78cf-gz6p9\" (UID: \"869ee50e-ceab-400c-a7b1-b76b81b29bbc\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.516817 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.526255 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.542253 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.563943 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.564032 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.571942 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-ph48c" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.574644 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.575916 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92wjm\" (UniqueName: \"kubernetes.io/projected/e42a9d09-d1ac-4d6c-853a-bc9f472e2b91-kube-api-access-92wjm\") pod \"mariadb-operator-controller-manager-88c7-h8tt7\" (UID: \"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.575952 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjqs2\" (UniqueName: \"kubernetes.io/projected/511b7524-d136-4131-9949-72c4f46a60d8-kube-api-access-rjqs2\") pod \"keystone-operator-controller-manager-5bd55b4bff-c46nt\" (UID: \"511b7524-d136-4131-9949-72c4f46a60d8\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.575990 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtmps\" (UniqueName: \"kubernetes.io/projected/2c4a76fe-4543-43e7-9e7e-7e3b4c149e46-kube-api-access-qtmps\") pod \"ironic-operator-controller-manager-5cd4858477-k5ccf\" (UID: \"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.576012 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9xv7\" (UniqueName: \"kubernetes.io/projected/9ffa6269-c199-4388-93a5-cdb131bf656d-kube-api-access-f9xv7\") pod \"manila-operator-controller-manager-6d68dbc695-54d4j\" (UID: \"9ffa6269-c199-4388-93a5-cdb131bf656d\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.576042 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6vl6\" (UniqueName: \"kubernetes.io/projected/3dc3c221-bd19-4634-8979-2c5f843c9b06-kube-api-access-k6vl6\") pod \"nova-operator-controller-manager-64cd67b5cb-m2fvs\" (UID: \"3dc3c221-bd19-4634-8979-2c5f843c9b06\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.576065 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66kvj\" (UniqueName: \"kubernetes.io/projected/860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a-kube-api-access-66kvj\") pod \"octavia-operator-controller-manager-7b787867f4-rsz5s\" (UID: \"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.576104 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5pfb\" (UniqueName: \"kubernetes.io/projected/081b46ad-0352-4dbf-8fde-1e064cad9b24-kube-api-access-r5pfb\") pod \"neutron-operator-controller-manager-849d5b9b84-b7v6v\" (UID: \"081b46ad-0352-4dbf-8fde-1e064cad9b24\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.586093 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.587054 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.590215 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-tt4sv" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.598081 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.602280 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.602586 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtmps\" (UniqueName: \"kubernetes.io/projected/2c4a76fe-4543-43e7-9e7e-7e3b4c149e46-kube-api-access-qtmps\") pod \"ironic-operator-controller-manager-5cd4858477-k5ccf\" (UID: \"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.613704 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6vl6\" (UniqueName: \"kubernetes.io/projected/3dc3c221-bd19-4634-8979-2c5f843c9b06-kube-api-access-k6vl6\") pod \"nova-operator-controller-manager-64cd67b5cb-m2fvs\" (UID: \"3dc3c221-bd19-4634-8979-2c5f843c9b06\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.625027 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjqs2\" (UniqueName: \"kubernetes.io/projected/511b7524-d136-4131-9949-72c4f46a60d8-kube-api-access-rjqs2\") pod \"keystone-operator-controller-manager-5bd55b4bff-c46nt\" (UID: \"511b7524-d136-4131-9949-72c4f46a60d8\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.625220 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92wjm\" (UniqueName: \"kubernetes.io/projected/e42a9d09-d1ac-4d6c-853a-bc9f472e2b91-kube-api-access-92wjm\") pod \"mariadb-operator-controller-manager-88c7-h8tt7\" (UID: \"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.625629 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5pfb\" (UniqueName: \"kubernetes.io/projected/081b46ad-0352-4dbf-8fde-1e064cad9b24-kube-api-access-r5pfb\") pod \"neutron-operator-controller-manager-849d5b9b84-b7v6v\" (UID: 
\"081b46ad-0352-4dbf-8fde-1e064cad9b24\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.656983 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.666027 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.667193 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.672024 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jgsgj" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.678110 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.678160 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w64fg\" (UniqueName: \"kubernetes.io/projected/89223991-b3cb-47fb-9efc-ac95917de547-kube-api-access-w64fg\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.678187 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw8fk\" (UniqueName: \"kubernetes.io/projected/c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1-kube-api-access-cw8fk\") pod \"ovn-operator-controller-manager-9976ff44c-lc6f7\" (UID: \"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.678231 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tst95\" (UniqueName: \"kubernetes.io/projected/9f9592fa-373c-436e-ab8a-96745a6c3f2d-kube-api-access-tst95\") pod \"placement-operator-controller-manager-589c58c6c-j8wpg\" (UID: \"9f9592fa-373c-436e-ab8a-96745a6c3f2d\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.678291 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66kvj\" (UniqueName: \"kubernetes.io/projected/860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a-kube-api-access-66kvj\") pod \"octavia-operator-controller-manager-7b787867f4-rsz5s\" (UID: \"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.679729 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9xv7\" (UniqueName: 
\"kubernetes.io/projected/9ffa6269-c199-4388-93a5-cdb131bf656d-kube-api-access-f9xv7\") pod \"manila-operator-controller-manager-6d68dbc695-54d4j\" (UID: \"9ffa6269-c199-4388-93a5-cdb131bf656d\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.694977 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.695840 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.696328 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.709857 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66kvj\" (UniqueName: \"kubernetes.io/projected/860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a-kube-api-access-66kvj\") pod \"octavia-operator-controller-manager-7b787867f4-rsz5s\" (UID: \"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.729557 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-gfsx6"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.730839 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.733631 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-z8xsb" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.739605 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-gfsx6"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.757499 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.758588 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.761894 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-szbh5" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.765476 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779489 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tst95\" (UniqueName: \"kubernetes.io/projected/9f9592fa-373c-436e-ab8a-96745a6c3f2d-kube-api-access-tst95\") pod \"placement-operator-controller-manager-589c58c6c-j8wpg\" (UID: \"9f9592fa-373c-436e-ab8a-96745a6c3f2d\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779583 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdrmw\" (UniqueName: \"kubernetes.io/projected/e8efd43e-cc1b-4812-acff-c618a78692a6-kube-api-access-cdrmw\") pod \"swift-operator-controller-manager-84d6b4b759-vrf7m\" (UID: \"e8efd43e-cc1b-4812-acff-c618a78692a6\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779605 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lpl5\" (UniqueName: \"kubernetes.io/projected/f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb-kube-api-access-9lpl5\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nvz55\" (UID: \"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779624 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779653 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w64fg\" (UniqueName: \"kubernetes.io/projected/89223991-b3cb-47fb-9efc-ac95917de547-kube-api-access-w64fg\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.779675 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw8fk\" (UniqueName: \"kubernetes.io/projected/c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1-kube-api-access-cw8fk\") pod \"ovn-operator-controller-manager-9976ff44c-lc6f7\" (UID: \"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:37 crc kubenswrapper[4756]: E0930 19:47:37.780184 4756 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 
30 19:47:37 crc kubenswrapper[4756]: E0930 19:47:37.780228 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert podName:89223991-b3cb-47fb-9efc-ac95917de547 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:38.280214562 +0000 UTC m=+987.901148039 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert") pod "openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" (UID: "89223991-b3cb-47fb-9efc-ac95917de547") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.819989 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw8fk\" (UniqueName: \"kubernetes.io/projected/c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1-kube-api-access-cw8fk\") pod \"ovn-operator-controller-manager-9976ff44c-lc6f7\" (UID: \"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.820993 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tst95\" (UniqueName: \"kubernetes.io/projected/9f9592fa-373c-436e-ab8a-96745a6c3f2d-kube-api-access-tst95\") pod \"placement-operator-controller-manager-589c58c6c-j8wpg\" (UID: \"9f9592fa-373c-436e-ab8a-96745a6c3f2d\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.821208 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w64fg\" (UniqueName: \"kubernetes.io/projected/89223991-b3cb-47fb-9efc-ac95917de547-kube-api-access-w64fg\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.838809 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.840735 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.842367 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.846385 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-5msqb" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.846544 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.857839 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.859661 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.883059 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.883709 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgxzt\" (UniqueName: \"kubernetes.io/projected/8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c-kube-api-access-kgxzt\") pod \"watcher-operator-controller-manager-6b9957f54f-p4prj\" (UID: \"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.883754 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdrmw\" (UniqueName: \"kubernetes.io/projected/e8efd43e-cc1b-4812-acff-c618a78692a6-kube-api-access-cdrmw\") pod \"swift-operator-controller-manager-84d6b4b759-vrf7m\" (UID: \"e8efd43e-cc1b-4812-acff-c618a78692a6\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.883774 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lpl5\" (UniqueName: \"kubernetes.io/projected/f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb-kube-api-access-9lpl5\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nvz55\" (UID: \"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.883847 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcfh8\" (UniqueName: \"kubernetes.io/projected/e592430a-6d1d-4e0e-8220-d51f46bbd54e-kube-api-access-fcfh8\") pod \"test-operator-controller-manager-85777745bb-gfsx6\" (UID: \"e592430a-6d1d-4e0e-8220-d51f46bbd54e\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.900621 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.901883 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.902824 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.903774 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zsdb5" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.908036 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lpl5\" (UniqueName: \"kubernetes.io/projected/f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb-kube-api-access-9lpl5\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nvz55\" (UID: \"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.908533 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdrmw\" (UniqueName: \"kubernetes.io/projected/e8efd43e-cc1b-4812-acff-c618a78692a6-kube-api-access-cdrmw\") pod \"swift-operator-controller-manager-84d6b4b759-vrf7m\" (UID: \"e8efd43e-cc1b-4812-acff-c618a78692a6\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.910186 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"] Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.912491 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.979684 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.988657 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.989522 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpzct\" (UniqueName: \"kubernetes.io/projected/fbc8173b-fdca-44d0-af4e-5f2f106bd031-kube-api-access-fpzct\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.989574 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcfh8\" (UniqueName: \"kubernetes.io/projected/e592430a-6d1d-4e0e-8220-d51f46bbd54e-kube-api-access-fcfh8\") pod \"test-operator-controller-manager-85777745bb-gfsx6\" (UID: \"e592430a-6d1d-4e0e-8220-d51f46bbd54e\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.989605 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.989637 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:37 crc kubenswrapper[4756]: I0930 19:47:37.989660 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgxzt\" (UniqueName: \"kubernetes.io/projected/8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c-kube-api-access-kgxzt\") pod \"watcher-operator-controller-manager-6b9957f54f-p4prj\" (UID: \"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.025537 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.033758 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.051927 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a76989a9-9056-4bc1-9e9c-5d7513a3e81b-cert\") pod \"infra-operator-controller-manager-9d6c5db85-r8xqm\" (UID: \"a76989a9-9056-4bc1-9e9c-5d7513a3e81b\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.070956 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcfh8\" (UniqueName: \"kubernetes.io/projected/e592430a-6d1d-4e0e-8220-d51f46bbd54e-kube-api-access-fcfh8\") pod \"test-operator-controller-manager-85777745bb-gfsx6\" (UID: \"e592430a-6d1d-4e0e-8220-d51f46bbd54e\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.071311 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgxzt\" (UniqueName: \"kubernetes.io/projected/8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c-kube-api-access-kgxzt\") pod \"watcher-operator-controller-manager-6b9957f54f-p4prj\" (UID: \"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.091276 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.091372 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q25n\" (UniqueName: \"kubernetes.io/projected/401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8-kube-api-access-5q25n\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq\" (UID: \"401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.091418 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpzct\" (UniqueName: \"kubernetes.io/projected/fbc8173b-fdca-44d0-af4e-5f2f106bd031-kube-api-access-fpzct\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:38 crc kubenswrapper[4756]: E0930 19:47:38.091678 4756 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 19:47:38 crc kubenswrapper[4756]: E0930 19:47:38.091766 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert podName:fbc8173b-fdca-44d0-af4e-5f2f106bd031 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:38.591746868 +0000 UTC m=+988.212680345 (durationBeforeRetry 500ms). 
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.100832 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.114091 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpzct\" (UniqueName: \"kubernetes.io/projected/fbc8173b-fdca-44d0-af4e-5f2f106bd031-kube-api-access-fpzct\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.122250 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.139160 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.186847 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.195625 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q25n\" (UniqueName: \"kubernetes.io/projected/401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8-kube-api-access-5q25n\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq\" (UID: \"401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.218587 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q25n\" (UniqueName: \"kubernetes.io/projected/401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8-kube-api-access-5q25n\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq\" (UID: \"401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.237385 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.296879 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.301343 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89223991-b3cb-47fb-9efc-ac95917de547-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8c6lblw\" (UID: \"89223991-b3cb-47fb-9efc-ac95917de547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.402735 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.422111 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" event={"ID":"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3","Type":"ContainerStarted","Data":"c46ae0e0389885ac67fa568e156bd5b05b51a5b467d58b45214d16510f2dc7b2"}
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.516313 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.536371 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.542618 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.606649 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m"
Sep 30 19:47:38 crc kubenswrapper[4756]: E0930 19:47:38.606809 4756 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Sep 30 19:47:38 crc kubenswrapper[4756]: E0930 19:47:38.606862 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert podName:fbc8173b-fdca-44d0-af4e-5f2f106bd031 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:39.606845715 +0000 UTC m=+989.227779192 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert") pod "openstack-operator-controller-manager-98d66ccb9-2xh6m" (UID: "fbc8173b-fdca-44d0-af4e-5f2f106bd031") : secret "webhook-server-cert" not found
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.678185 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9"]
Sep 30 19:47:38 crc kubenswrapper[4756]: W0930 19:47:38.686674 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod869ee50e_ceab_400c_a7b1_b76b81b29bbc.slice/crio-cf2c9b4550a61eae5ea3f3f5e335fd70020dbc2444e0fe51be270c6734fee5f0 WatchSource:0}: Error finding container cf2c9b4550a61eae5ea3f3f5e335fd70020dbc2444e0fe51be270c6734fee5f0: Status 404 returned error can't find the container with id cf2c9b4550a61eae5ea3f3f5e335fd70020dbc2444e0fe51be270c6734fee5f0
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.711642 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.718327 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.917214 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs"]
Sep 30 19:47:38 crc kubenswrapper[4756]: W0930 19:47:38.924729 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dc3c221_bd19_4634_8979_2c5f843c9b06.slice/crio-0710c972b0b7385b56541baff5a16e822472337de9c0cb648299d03ede8a8bb6 WatchSource:0}: Error finding container 0710c972b0b7385b56541baff5a16e822472337de9c0cb648299d03ede8a8bb6: Status 404 returned error can't find the container with id 0710c972b0b7385b56541baff5a16e822472337de9c0cb648299d03ede8a8bb6
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.934802 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v"]
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.956699 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7"]
Sep 30 19:47:38 crc kubenswrapper[4756]: W0930 19:47:38.966452 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1408e4f_3a38_4b09_8418_a9a3ab3ca3b1.slice/crio-dd3c3b636e68bc0e6079d9762fb0c6827c0961730cb747814a89551bb42b6cd2 WatchSource:0}: Error finding container dd3c3b636e68bc0e6079d9762fb0c6827c0961730cb747814a89551bb42b6cd2: Status 404 returned error can't find the container with id dd3c3b636e68bc0e6079d9762fb0c6827c0961730cb747814a89551bb42b6cd2
Sep 30 19:47:38 crc kubenswrapper[4756]: I0930 19:47:38.969222 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7"]
Sep 30 19:47:38 crc kubenswrapper[4756]: W0930 19:47:38.979830 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode42a9d09_d1ac_4d6c_853a_bc9f472e2b91.slice/crio-199c5b0fab7e39ee78e8bf877f7a409d9fea2e52d6d5d02c1aba50a60779c3f6 WatchSource:0}: Error finding container 199c5b0fab7e39ee78e8bf877f7a409d9fea2e52d6d5d02c1aba50a60779c3f6: Status 404 returned error can't find the container with id 199c5b0fab7e39ee78e8bf877f7a409d9fea2e52d6d5d02c1aba50a60779c3f6
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.086181 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.104310 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.114461 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf"]
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.130745 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:ae0bf0aa387864d26d4cbd04d84f6362d53b721348188f096aaa48757419c4f5,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qtmps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-5cd4858477-k5ccf_openstack-operators(2c4a76fe-4543-43e7-9e7e-7e3b4c149e46): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.132389 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9lpl5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-nvz55_openstack-operators(f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.133804 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cdrmw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-84d6b4b759-vrf7m_openstack-operators(e8efd43e-cc1b-4812-acff-c618a78692a6): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.137099 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.137128 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.137138 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.137148 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.137156 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-gfsx6"]
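These "ErrImagePull: pull QPS exceeded" failures come from kubelet's client-side rate limit on registry pulls, not from the registry: with many operator pods starting at once, pull requests beyond the configured QPS/burst fail immediately and are retried later. A sketch of that token-bucket idea using golang.org/x/time/rate (the 5 QPS / burst 10 values are quoted as an assumption based on kubelet's commonly documented registryPullQPS/registryBurst defaults):

```go
package main

import (
	"errors"
	"fmt"

	"golang.org/x/time/rate"
)

var errPullQPSExceeded = errors.New("pull QPS exceeded") // message seen in the log

// pullLimiter mimics a kubelet-style registry pull limiter:
// at most 5 pull starts per second, with a burst of 10.
var pullLimiter = rate.NewLimiter(rate.Limit(5), 10)

func startPull(image string) error {
	if !pullLimiter.Allow() { // no waiting: over-limit pulls fail fast
		return errPullQPSExceeded
	}
	fmt.Println("pulling", image)
	return nil
}

func main() {
	// 20 operator images requested in the same instant: the first 10
	// (the burst) are admitted, the rest fail like the pods in this log.
	for i := 0; i < 20; i++ {
		if err := startPull(fmt.Sprintf("quay.io/example/operator-%d", i)); err != nil {
			fmt.Printf("image %d: %v\n", i, err)
		}
	}
}
```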
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.139719 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f9xv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6d68dbc695-54d4j_openstack-operators(9ffa6269-c199-4388-93a5-cdb131bf656d): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.144292 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5q25n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq_openstack-operators(401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.145923 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" podUID="401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.160048 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fcfh8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-85777745bb-gfsx6_openstack-operators(e592430a-6d1d-4e0e-8220-d51f46bbd54e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.266050 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj"]
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.273516 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm"]
Sep 30 19:47:39 crc kubenswrapper[4756]: W0930 19:47:39.286167 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ca7950c_897c_4a5b_bf6e_2a9aa33bfa2c.slice/crio-5a68d8cd88c4809cbacd4c36a9d54228ccbc7df829b8cd69a643d63eef07f3cb WatchSource:0}: Error finding container 5a68d8cd88c4809cbacd4c36a9d54228ccbc7df829b8cd69a643d63eef07f3cb: Status 404 returned error can't find the container with id 5a68d8cd88c4809cbacd4c36a9d54228ccbc7df829b8cd69a643d63eef07f3cb
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.289005 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw"]
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.291130 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgxzt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6b9957f54f-p4prj_openstack-operators(8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.300831 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w64fg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-77b9676b8c6lblw_openstack-operators(89223991-b3cb-47fb-9efc-ac95917de547): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
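The unusually long spec dump above belongs to the openstack-baremetal-operator manager container, whose environment seeds dozens of RELATED_IMAGE_*_URL_DEFAULT variables; operators typically read such variables and fall back to a compiled-in default when one is unset. A hedged sketch of that lookup pattern (the variable name and image reference are taken from the log; the helper itself is illustrative, not the operator's actual code):

```go
package main

import (
	"fmt"
	"os"
)

// imageOrDefault returns the image configured via an environment variable,
// falling back to a default when the variable is unset or empty. This is the
// override pattern the RELATED_IMAGE_* variables in the spec above support.
func imageOrDefault(envVar, def string) string {
	if v, ok := os.LookupEnv(envVar); ok && v != "" {
		return v
	}
	return def
}

func main() {
	img := imageOrDefault(
		"RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT",
		"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified",
	)
	fmt.Println("mariadb image:", img)
}
```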
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.346092 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" podUID="f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.348879 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" podUID="2c4a76fe-4543-43e7-9e7e-7e3b4c149e46"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.386972 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" podUID="e592430a-6d1d-4e0e-8220-d51f46bbd54e"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.394549 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" podUID="e8efd43e-cc1b-4812-acff-c618a78692a6"
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.400974 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" podUID="9ffa6269-c199-4388-93a5-cdb131bf656d"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.435699 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" event={"ID":"081b46ad-0352-4dbf-8fde-1e064cad9b24","Type":"ContainerStarted","Data":"eb2fe1bab518b9fd17764876770be01d3cd4c8b77e70a26039b31feefdf28875"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.437717 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" event={"ID":"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1","Type":"ContainerStarted","Data":"dd3c3b636e68bc0e6079d9762fb0c6827c0961730cb747814a89551bb42b6cd2"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.440779 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" event={"ID":"e592430a-6d1d-4e0e-8220-d51f46bbd54e","Type":"ContainerStarted","Data":"21b5e1d171e639b2d05f9c5d98fb6e1036c4d6b8b10475ba66977354c23b4bbc"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.440823 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" event={"ID":"e592430a-6d1d-4e0e-8220-d51f46bbd54e","Type":"ContainerStarted","Data":"7c8309dac7ac71ab455d2bf257f1eff7748ed6ab25cfa991eb1d8e43d72296c0"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.442601 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" podUID="e592430a-6d1d-4e0e-8220-d51f46bbd54e"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.445319 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" event={"ID":"401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8","Type":"ContainerStarted","Data":"b41e2840d8e44f1fbf9671f34466c1d1e3790e4add1081dd7a8ea298c6259958"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.448809 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" podUID="401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.452331 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" event={"ID":"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb","Type":"ContainerStarted","Data":"78b036de36f31da4fa55f9465bbe1286f07f52723f8f25eb724a6f2e30cabf07"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.452361 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" event={"ID":"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb","Type":"ContainerStarted","Data":"74ce403e5996bbbfb638536e4ce3f64138ea83077e171ec3bcb7b57ea435acc0"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.461039 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" podUID="f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.461428 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" event={"ID":"a76989a9-9056-4bc1-9e9c-5d7513a3e81b","Type":"ContainerStarted","Data":"d409d8ae5c9cec64d85827536f1f58372125cd628743e800ea88176d50494a1b"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.463628 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" event={"ID":"7871ae32-4b8d-4902-af0f-152e38693ee5","Type":"ContainerStarted","Data":"2f52f640e4a94bbe2274177c75eb6004887dacc772746ae7f47e172d9b72146c"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.466549 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" event={"ID":"869ee50e-ceab-400c-a7b1-b76b81b29bbc","Type":"ContainerStarted","Data":"cf2c9b4550a61eae5ea3f3f5e335fd70020dbc2444e0fe51be270c6734fee5f0"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.470249 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" event={"ID":"9ffa6269-c199-4388-93a5-cdb131bf656d","Type":"ContainerStarted","Data":"388864ec53bb324a9ed39498007f972b69f906b68ea42d9429a201f07a819b1a"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.470275 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" event={"ID":"9ffa6269-c199-4388-93a5-cdb131bf656d","Type":"ContainerStarted","Data":"0157a3b82adec327c44d65a41576c72cea6a485a19c724a4db227cbe6e727314"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.484728 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" podUID="9ffa6269-c199-4388-93a5-cdb131bf656d"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.493572 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" event={"ID":"18189d34-2947-4744-b2ca-e970a456289f","Type":"ContainerStarted","Data":"31a2dd8f96d439a945f40c78f7dacc21573d5af4959ceebfe63c8cf62f5b0abb"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.498747 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" event={"ID":"e8efd43e-cc1b-4812-acff-c618a78692a6","Type":"ContainerStarted","Data":"2482affdcab7b1211a028fa8c5b8c3a396c2b608ab393fbdfb2b866d7d7f7053"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.498815 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" event={"ID":"e8efd43e-cc1b-4812-acff-c618a78692a6","Type":"ContainerStarted","Data":"0a82dbbaea2f7f010ea5e1f5b14589f6da9aee0a3a248755958c6ba26fb1b084"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.500455 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" podUID="e8efd43e-cc1b-4812-acff-c618a78692a6"
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.501686 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" event={"ID":"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2","Type":"ContainerStarted","Data":"47127aec08de12817492eed26f58755e461fbcebf5385a77acacd655e5d89aab"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.516325 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" event={"ID":"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46","Type":"ContainerStarted","Data":"28bc3c616117cabd510a0cf83e80c6d06cb2bb0b7982b3fea147faa519bd654b"}
Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.516367 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" event={"ID":"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46","Type":"ContainerStarted","Data":"b775792c7c27df62e7c1f619f8b36793df835dbef69729ff6aff9c71afda9875"}
Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.534818 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:ae0bf0aa387864d26d4cbd04d84f6362d53b721348188f096aaa48757419c4f5\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" podUID="2c4a76fe-4543-43e7-9e7e-7e3b4c149e46"
\\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:ae0bf0aa387864d26d4cbd04d84f6362d53b721348188f096aaa48757419c4f5\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" podUID="2c4a76fe-4543-43e7-9e7e-7e3b4c149e46" Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.535718 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" podUID="89223991-b3cb-47fb-9efc-ac95917de547" Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.536440 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" event={"ID":"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c","Type":"ContainerStarted","Data":"5a68d8cd88c4809cbacd4c36a9d54228ccbc7df829b8cd69a643d63eef07f3cb"} Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.537113 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" podUID="8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c" Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.540937 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" event={"ID":"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a","Type":"ContainerStarted","Data":"e9563d12fb29e35afe01e0df19f81410ed42f33b3c4f0959961f1a8333a71664"} Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.547518 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" event={"ID":"3dc3c221-bd19-4634-8979-2c5f843c9b06","Type":"ContainerStarted","Data":"0710c972b0b7385b56541baff5a16e822472337de9c0cb648299d03ede8a8bb6"} Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.561493 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" event={"ID":"9f9592fa-373c-436e-ab8a-96745a6c3f2d","Type":"ContainerStarted","Data":"74db31bcb86e19d87d8f491fa3c45ee2bc548f4445e04ddd9af914dddb8b3095"} Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.568943 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" event={"ID":"89223991-b3cb-47fb-9efc-ac95917de547","Type":"ContainerStarted","Data":"63a22e936461c3d40c7f9e1ef4b4642edf1279afb63e7603ab787398df6da1ca"} Sep 30 19:47:39 crc kubenswrapper[4756]: E0930 19:47:39.572062 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" podUID="89223991-b3cb-47fb-9efc-ac95917de547" Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.573560 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" event={"ID":"a5234caa-5273-4074-837e-30ee316c5d73","Type":"ContainerStarted","Data":"c4bffad48d55a2dc5b41c9a0da9257c37ebc3dc4ae54fcace110f56954d95b74"} Sep 30 19:47:39 
crc kubenswrapper[4756]: I0930 19:47:39.574665 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" event={"ID":"511b7524-d136-4131-9949-72c4f46a60d8","Type":"ContainerStarted","Data":"6e69565cda02fcef26b46e95d3696848217a94c715d25b0e4aa3f8a380142389"} Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.607539 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" event={"ID":"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91","Type":"ContainerStarted","Data":"199c5b0fab7e39ee78e8bf877f7a409d9fea2e52d6d5d02c1aba50a60779c3f6"} Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.623927 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.636235 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fbc8173b-fdca-44d0-af4e-5f2f106bd031-cert\") pod \"openstack-operator-controller-manager-98d66ccb9-2xh6m\" (UID: \"fbc8173b-fdca-44d0-af4e-5f2f106bd031\") " pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:39 crc kubenswrapper[4756]: I0930 19:47:39.670637 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.120818 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m"] Sep 30 19:47:40 crc kubenswrapper[4756]: W0930 19:47:40.126878 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbc8173b_fdca_44d0_af4e_5f2f106bd031.slice/crio-0e0d106de7e878ab122b8cf27126cd33c233687cd3be8d3aa236d65da1a4658d WatchSource:0}: Error finding container 0e0d106de7e878ab122b8cf27126cd33c233687cd3be8d3aa236d65da1a4658d: Status 404 returned error can't find the container with id 0e0d106de7e878ab122b8cf27126cd33c233687cd3be8d3aa236d65da1a4658d Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.643003 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" event={"ID":"89223991-b3cb-47fb-9efc-ac95917de547","Type":"ContainerStarted","Data":"8655fd1b2f556beb8b7402e029a7c48873d452bd987988f5aaee8f9abdc06e8b"} Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.645773 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" podUID="89223991-b3cb-47fb-9efc-ac95917de547" Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.647671 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" 
event={"ID":"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c","Type":"ContainerStarted","Data":"c6b0350379aaf717c7f9b798039d96b6530313e631b9982abe699f1dd67cfada"} Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.648820 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" podUID="8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c" Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.679556 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" event={"ID":"fbc8173b-fdca-44d0-af4e-5f2f106bd031","Type":"ContainerStarted","Data":"9bede193ab7f59d8ebb9a596b8328823e535498e3dcd0db8a7dfe60964f95f39"} Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.679607 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" event={"ID":"fbc8173b-fdca-44d0-af4e-5f2f106bd031","Type":"ContainerStarted","Data":"dd87f68437593732fb13e21ad70d9d13484d6562e02dab22458c0d8875bf50d8"} Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.679617 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" event={"ID":"fbc8173b-fdca-44d0-af4e-5f2f106bd031","Type":"ContainerStarted","Data":"0e0d106de7e878ab122b8cf27126cd33c233687cd3be8d3aa236d65da1a4658d"} Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.679830 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691439 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" podUID="401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691561 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:ae0bf0aa387864d26d4cbd04d84f6362d53b721348188f096aaa48757419c4f5\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" podUID="2c4a76fe-4543-43e7-9e7e-7e3b4c149e46" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691624 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" podUID="e8efd43e-cc1b-4812-acff-c618a78692a6" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691682 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" podUID="9ffa6269-c199-4388-93a5-cdb131bf656d" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691739 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" podUID="f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb" Sep 30 19:47:40 crc kubenswrapper[4756]: E0930 19:47:40.691794 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" podUID="e592430a-6d1d-4e0e-8220-d51f46bbd54e" Sep 30 19:47:40 crc kubenswrapper[4756]: I0930 19:47:40.765510 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" podStartSLOduration=3.76548845 podStartE2EDuration="3.76548845s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:47:40.755523298 +0000 UTC m=+990.376456785" watchObservedRunningTime="2025-09-30 19:47:40.76548845 +0000 UTC m=+990.386421927" Sep 30 19:47:41 crc kubenswrapper[4756]: E0930 19:47:41.687513 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" podUID="8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c" Sep 30 19:47:41 crc kubenswrapper[4756]: E0930 19:47:41.688040 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" podUID="89223991-b3cb-47fb-9efc-ac95917de547" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.781014 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" event={"ID":"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1","Type":"ContainerStarted","Data":"21f0caae3d23b931c677a9384cb4ea9371746ca5a2be6a0b9edd0d18e93ae20c"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.781557 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" event={"ID":"c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1","Type":"ContainerStarted","Data":"6ff6219ec7d72b23aaf994def1ff168d3962bb4b532df0b0569b4da1d6cbabbf"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.781608 4756 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.785363 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" event={"ID":"869ee50e-ceab-400c-a7b1-b76b81b29bbc","Type":"ContainerStarted","Data":"078908c9a88d4da224e6f404a48b871bd397a7954595551b2f1bd2c998b4881b"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.786878 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" event={"ID":"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a","Type":"ContainerStarted","Data":"1d230febacc1b86d2f46a62554fac7be20e23bf81473f8477006b63e471c4e14"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.797494 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" event={"ID":"18189d34-2947-4744-b2ca-e970a456289f","Type":"ContainerStarted","Data":"3c20697b1a88c8b7a1616ccd1f1e47ddc54012c1cea062a91ed8c83f0acd0e67"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.797548 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" event={"ID":"18189d34-2947-4744-b2ca-e970a456289f","Type":"ContainerStarted","Data":"2d608f5d376439cb3d8b70e4b782a0829422c25d4f5b450b91c677aee54b1c22"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.797833 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.810856 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" event={"ID":"511b7524-d136-4131-9949-72c4f46a60d8","Type":"ContainerStarted","Data":"37d41d467bb715a0f4d1ad99a8d8121f4b2c11a9c68ddbfb9a04fc2749c61c0b"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.810908 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" event={"ID":"511b7524-d136-4131-9949-72c4f46a60d8","Type":"ContainerStarted","Data":"9eb0afd33451455c9b5cdeadc41b0a1b22f7987139e7a9b583a4b77f7dcca42b"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.811137 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.817063 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" podStartSLOduration=3.364469388 podStartE2EDuration="11.817049645s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.971196526 +0000 UTC m=+988.592130023" lastFinishedPulling="2025-09-30 19:47:47.423776803 +0000 UTC m=+997.044710280" observedRunningTime="2025-09-30 19:47:48.814856447 +0000 UTC m=+998.435789924" watchObservedRunningTime="2025-09-30 19:47:48.817049645 +0000 UTC m=+998.437983122" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.817494 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" 
event={"ID":"3dc3c221-bd19-4634-8979-2c5f843c9b06","Type":"ContainerStarted","Data":"ade4f156176aa17fd95590a771f7a0f5406b11c0054a03f7606d5dba4e01c508"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.817533 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" event={"ID":"3dc3c221-bd19-4634-8979-2c5f843c9b06","Type":"ContainerStarted","Data":"ebcfaedbbaeae1d1efaf6836004f895148f0d04f11c752eb65f75a4270873200"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.817724 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.819860 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" event={"ID":"7871ae32-4b8d-4902-af0f-152e38693ee5","Type":"ContainerStarted","Data":"e32dd667649935579e19dd04b4bf04f9182af2ca1f253d944edffef1e8f71353"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.819893 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" event={"ID":"7871ae32-4b8d-4902-af0f-152e38693ee5","Type":"ContainerStarted","Data":"61b0841b4c06f08207346c3e0262dfe8bcb2835a061f131813b6183c616f1a46"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.820066 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.827881 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" event={"ID":"081b46ad-0352-4dbf-8fde-1e064cad9b24","Type":"ContainerStarted","Data":"6416cdb81120c422d955ae783c8c24b20c150fb1470434cc182b170dcf583576"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.830856 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" event={"ID":"a5234caa-5273-4074-837e-30ee316c5d73","Type":"ContainerStarted","Data":"18dd8cf3bf9628381db90cf8a6ca6ff0c0341ef948bc467d4b41ee3cc0cdd5a7"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.840380 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" event={"ID":"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2","Type":"ContainerStarted","Data":"a8e83563acb6695348d29405d3ce78bf8766378d15bcb74ea7cc056d891c7fb3"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.840433 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" event={"ID":"31f39b27-7fcc-453c-8ffd-3c5d7b12aed2","Type":"ContainerStarted","Data":"838bfa730a290eb546e868fb0b5cff4a83cfa1350ba66a4e2513b915e19b8194"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.841121 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.845741 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" podStartSLOduration=3.147598157 podStartE2EDuration="11.845721317s" podCreationTimestamp="2025-09-30 19:47:37 +0000 
UTC" firstStartedPulling="2025-09-30 19:47:38.726655369 +0000 UTC m=+988.347588846" lastFinishedPulling="2025-09-30 19:47:47.424778519 +0000 UTC m=+997.045712006" observedRunningTime="2025-09-30 19:47:48.841123696 +0000 UTC m=+998.462057163" watchObservedRunningTime="2025-09-30 19:47:48.845721317 +0000 UTC m=+998.466654794" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.847631 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" event={"ID":"9f9592fa-373c-436e-ab8a-96745a6c3f2d","Type":"ContainerStarted","Data":"1bd4522546159f9b63b49143ca8b16b7bb0136736485fd6f94454922f51a8ef4"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.847673 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" event={"ID":"9f9592fa-373c-436e-ab8a-96745a6c3f2d","Type":"ContainerStarted","Data":"b6851093fddb4c831af3477103a18233bd276258e42e96e2343d5775c16fc54e"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.848277 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.857323 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" event={"ID":"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3","Type":"ContainerStarted","Data":"30ac3eb428e3177ec228b6e22a2943a1b73a399d74650b816d6c6641486c314e"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.857376 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" event={"ID":"7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3","Type":"ContainerStarted","Data":"f28c6f9400587b0a7dc27f98b679f8e92f65b1486cc162a308a5ecd82511d675"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.857440 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.859881 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" event={"ID":"a76989a9-9056-4bc1-9e9c-5d7513a3e81b","Type":"ContainerStarted","Data":"83f95d47cfb524cf4cd1769a16b68acd9e96dd1f3ae4c16b5a12c66aae20c285"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.859992 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" event={"ID":"a76989a9-9056-4bc1-9e9c-5d7513a3e81b","Type":"ContainerStarted","Data":"4d6835fc9695f3c5032703a06330d9871ccfd6b95c2874df58503af96f01a75f"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.860668 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.870549 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" event={"ID":"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91","Type":"ContainerStarted","Data":"3eaa80a70069981814a87c97786d2e6bdb826419960a576d0d1d9529e2eb913b"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.870599 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" event={"ID":"e42a9d09-d1ac-4d6c-853a-bc9f472e2b91","Type":"ContainerStarted","Data":"62bac97e86d17ef47b00b3b2e5a4a108d5c3f337d06f8a50b4aaccca21b8e6a9"} Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.871245 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.875486 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" podStartSLOduration=3.041904683 podStartE2EDuration="11.875474698s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.596304418 +0000 UTC m=+988.217237895" lastFinishedPulling="2025-09-30 19:47:47.429874423 +0000 UTC m=+997.050807910" observedRunningTime="2025-09-30 19:47:48.870276801 +0000 UTC m=+998.491210278" watchObservedRunningTime="2025-09-30 19:47:48.875474698 +0000 UTC m=+998.496408175" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.910442 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" podStartSLOduration=3.773296586 podStartE2EDuration="11.910425735s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.290204207 +0000 UTC m=+988.911137684" lastFinishedPulling="2025-09-30 19:47:47.427333346 +0000 UTC m=+997.048266833" observedRunningTime="2025-09-30 19:47:48.907284012 +0000 UTC m=+998.528217489" watchObservedRunningTime="2025-09-30 19:47:48.910425735 +0000 UTC m=+998.531359212" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.923212 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" podStartSLOduration=3.6556483589999997 podStartE2EDuration="11.92319387s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.130103046 +0000 UTC m=+988.751036523" lastFinishedPulling="2025-09-30 19:47:47.397648557 +0000 UTC m=+997.018582034" observedRunningTime="2025-09-30 19:47:48.922922483 +0000 UTC m=+998.543855960" watchObservedRunningTime="2025-09-30 19:47:48.92319387 +0000 UTC m=+998.544127347" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.945475 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" podStartSLOduration=3.251435332 podStartE2EDuration="11.945458224s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.723338792 +0000 UTC m=+988.344272269" lastFinishedPulling="2025-09-30 19:47:47.417361684 +0000 UTC m=+997.038295161" observedRunningTime="2025-09-30 19:47:48.94150256 +0000 UTC m=+998.562436047" watchObservedRunningTime="2025-09-30 19:47:48.945458224 +0000 UTC m=+998.566391701" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.957456 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" podStartSLOduration=2.751448312 podStartE2EDuration="11.957434219s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.184545953 +0000 UTC m=+987.805479430" lastFinishedPulling="2025-09-30 19:47:47.39053186 +0000 UTC m=+997.011465337" 
observedRunningTime="2025-09-30 19:47:48.95366903 +0000 UTC m=+998.574602507" watchObservedRunningTime="2025-09-30 19:47:48.957434219 +0000 UTC m=+998.578367696" Sep 30 19:47:48 crc kubenswrapper[4756]: I0930 19:47:48.982420 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" podStartSLOduration=3.421905135 podStartE2EDuration="11.982406324s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.927797067 +0000 UTC m=+988.548730544" lastFinishedPulling="2025-09-30 19:47:47.488298256 +0000 UTC m=+997.109231733" observedRunningTime="2025-09-30 19:47:48.981529271 +0000 UTC m=+998.602462748" watchObservedRunningTime="2025-09-30 19:47:48.982406324 +0000 UTC m=+998.603339801" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.024826 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" podStartSLOduration=3.199707095 podStartE2EDuration="12.024810207s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.597630253 +0000 UTC m=+988.218563730" lastFinishedPulling="2025-09-30 19:47:47.422733365 +0000 UTC m=+997.043666842" observedRunningTime="2025-09-30 19:47:49.004165015 +0000 UTC m=+998.625098492" watchObservedRunningTime="2025-09-30 19:47:49.024810207 +0000 UTC m=+998.645743684" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.024921 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" podStartSLOduration=3.583961627 podStartE2EDuration="12.024918279s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.982643436 +0000 UTC m=+988.603576913" lastFinishedPulling="2025-09-30 19:47:47.423600078 +0000 UTC m=+997.044533565" observedRunningTime="2025-09-30 19:47:49.021719025 +0000 UTC m=+998.642652502" watchObservedRunningTime="2025-09-30 19:47:49.024918279 +0000 UTC m=+998.645851756" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.680992 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-98d66ccb9-2xh6m" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.879280 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" event={"ID":"869ee50e-ceab-400c-a7b1-b76b81b29bbc","Type":"ContainerStarted","Data":"e9eea82285587ae44d80d8bb732ffde09c7a47ae014c02880ec6e45b4722232e"} Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.880239 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.881565 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" event={"ID":"860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a","Type":"ContainerStarted","Data":"3a11a92651aab3f08bb26013d2527f030738507540da818d479afc245a54fba7"} Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.881936 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.883517 4756 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" event={"ID":"081b46ad-0352-4dbf-8fde-1e064cad9b24","Type":"ContainerStarted","Data":"cafe95392e8882b38cb3056bf78dcb08fd65279b4a7d245a982d4589a30f65af"} Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.883764 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.886506 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" event={"ID":"a5234caa-5273-4074-837e-30ee316c5d73","Type":"ContainerStarted","Data":"35ef1c420fd08355fc11ddadac60dee19003a878fe8b50beffaf19e9d6f16d91"} Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.904207 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" podStartSLOduration=4.178780766 podStartE2EDuration="12.904186663s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.692835721 +0000 UTC m=+988.313769198" lastFinishedPulling="2025-09-30 19:47:47.418241618 +0000 UTC m=+997.039175095" observedRunningTime="2025-09-30 19:47:49.898065262 +0000 UTC m=+999.518998769" watchObservedRunningTime="2025-09-30 19:47:49.904186663 +0000 UTC m=+999.525120140" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.916829 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" podStartSLOduration=4.389323091 podStartE2EDuration="12.916813054s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.940747297 +0000 UTC m=+988.561680774" lastFinishedPulling="2025-09-30 19:47:47.46823725 +0000 UTC m=+997.089170737" observedRunningTime="2025-09-30 19:47:49.91398888 +0000 UTC m=+999.534922357" watchObservedRunningTime="2025-09-30 19:47:49.916813054 +0000 UTC m=+999.537746531" Sep 30 19:47:49 crc kubenswrapper[4756]: I0930 19:47:49.936076 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" podStartSLOduration=4.640068031 podStartE2EDuration="12.936055579s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.130372073 +0000 UTC m=+988.751305550" lastFinishedPulling="2025-09-30 19:47:47.426359611 +0000 UTC m=+997.047293098" observedRunningTime="2025-09-30 19:47:49.933830641 +0000 UTC m=+999.554764138" watchObservedRunningTime="2025-09-30 19:47:49.936055579 +0000 UTC m=+999.556989076" Sep 30 19:47:50 crc kubenswrapper[4756]: I0930 19:47:50.896045 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:53 crc kubenswrapper[4756]: I0930 19:47:53.925125 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" event={"ID":"e8efd43e-cc1b-4812-acff-c618a78692a6","Type":"ContainerStarted","Data":"73223cabd0c35fe99c77402638dcf10cdc6806ced63010b9aef73a2b77044747"} Sep 30 19:47:53 crc kubenswrapper[4756]: I0930 19:47:53.925992 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 
19:47:53 crc kubenswrapper[4756]: I0930 19:47:53.944280 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" podStartSLOduration=2.97474207 podStartE2EDuration="16.94426368s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.133670489 +0000 UTC m=+988.754603966" lastFinishedPulling="2025-09-30 19:47:53.103192109 +0000 UTC m=+1002.724125576" observedRunningTime="2025-09-30 19:47:53.941894558 +0000 UTC m=+1003.562828035" watchObservedRunningTime="2025-09-30 19:47:53.94426368 +0000 UTC m=+1003.565197167" Sep 30 19:47:53 crc kubenswrapper[4756]: I0930 19:47:53.944861 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" podStartSLOduration=8.059938364 podStartE2EDuration="16.944852776s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:38.572296428 +0000 UTC m=+988.193229905" lastFinishedPulling="2025-09-30 19:47:47.45721084 +0000 UTC m=+997.078144317" observedRunningTime="2025-09-30 19:47:49.957292897 +0000 UTC m=+999.578226384" watchObservedRunningTime="2025-09-30 19:47:53.944852776 +0000 UTC m=+1003.565786263" Sep 30 19:47:54 crc kubenswrapper[4756]: I0930 19:47:54.936026 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" event={"ID":"e592430a-6d1d-4e0e-8220-d51f46bbd54e","Type":"ContainerStarted","Data":"ed4d50e39723b20987ada204c8c62a5ef41873c47e0718f61752af784b8ed6cc"} Sep 30 19:47:54 crc kubenswrapper[4756]: I0930 19:47:54.936901 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:47:54 crc kubenswrapper[4756]: I0930 19:47:54.963268 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" podStartSLOduration=2.861611261 podStartE2EDuration="17.963231959s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.159920888 +0000 UTC m=+988.780854365" lastFinishedPulling="2025-09-30 19:47:54.261541586 +0000 UTC m=+1003.882475063" observedRunningTime="2025-09-30 19:47:54.955537177 +0000 UTC m=+1004.576470664" watchObservedRunningTime="2025-09-30 19:47:54.963231959 +0000 UTC m=+1004.584165486" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.460461 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-klbxz" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.489487 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-ldpdd" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.494818 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-xg9fd" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.538000 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-6rqqt" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.580972 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-gz6p9" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.660078 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-c46nt" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.700252 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-h8tt7" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.700766 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-7cv4w" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.845752 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b7v6v" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.871070 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-m2fvs" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.887753 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-rsz5s" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.920652 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lc6f7" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.969241 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" event={"ID":"401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8","Type":"ContainerStarted","Data":"afc354f970a9f47676d94b3194e58b65f2afb450b1d9182d91a4c9e67a8fd511"} Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.982849 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" event={"ID":"f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb","Type":"ContainerStarted","Data":"17ea7ab16cce12ad114232fb69814633ea25d623779229859247bc733e3653b1"} Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.983795 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.986025 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq" podStartSLOduration=3.185974514 podStartE2EDuration="20.986009682s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.144177745 +0000 UTC m=+988.765111222" lastFinishedPulling="2025-09-30 19:47:56.944212903 +0000 UTC m=+1006.565146390" observedRunningTime="2025-09-30 19:47:57.984371549 +0000 UTC m=+1007.605305016" watchObservedRunningTime="2025-09-30 19:47:57.986009682 +0000 UTC m=+1007.606943159" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.987870 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" event={"ID":"2c4a76fe-4543-43e7-9e7e-7e3b4c149e46","Type":"ContainerStarted","Data":"2e210e26b9d57c775e4b092929c30556b6dbed992b3adabb7f34c29fd1004543"} Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.988305 
4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:47:57 crc kubenswrapper[4756]: I0930 19:47:57.998189 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j8wpg" Sep 30 19:47:58 crc kubenswrapper[4756]: I0930 19:47:58.006254 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" podStartSLOduration=3.167658272 podStartE2EDuration="21.006236762s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.132266602 +0000 UTC m=+988.753200079" lastFinishedPulling="2025-09-30 19:47:56.970845092 +0000 UTC m=+1006.591778569" observedRunningTime="2025-09-30 19:47:58.001256702 +0000 UTC m=+1007.622190169" watchObservedRunningTime="2025-09-30 19:47:58.006236762 +0000 UTC m=+1007.627170239" Sep 30 19:47:58 crc kubenswrapper[4756]: I0930 19:47:58.030376 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-vrf7m" Sep 30 19:47:58 crc kubenswrapper[4756]: I0930 19:47:58.033205 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" podStartSLOduration=3.214305317 podStartE2EDuration="21.03319635s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.130623479 +0000 UTC m=+988.751556956" lastFinishedPulling="2025-09-30 19:47:56.949514502 +0000 UTC m=+1006.570447989" observedRunningTime="2025-09-30 19:47:58.032003379 +0000 UTC m=+1007.652936856" watchObservedRunningTime="2025-09-30 19:47:58.03319635 +0000 UTC m=+1007.654129827" Sep 30 19:47:58 crc kubenswrapper[4756]: I0930 19:47:58.196432 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-r8xqm" Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.017785 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" event={"ID":"89223991-b3cb-47fb-9efc-ac95917de547","Type":"ContainerStarted","Data":"9abae0915c140958b108eafc2aeb287974f479e6691ad63c99697a68c08e3026"} Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.019981 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.020209 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" event={"ID":"8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c","Type":"ContainerStarted","Data":"e46a75a10fa793474f83345d15181800a04a8365d38b6d3923899d76680454d5"} Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.022860 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" event={"ID":"9ffa6269-c199-4388-93a5-cdb131bf656d","Type":"ContainerStarted","Data":"34be6ebfbb597b0a67e341fa949c0e822115d252ea2d0882e2767f6998d6ea2f"} Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.023254 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.068627 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" podStartSLOduration=3.214197114 podStartE2EDuration="23.068601141s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.300365634 +0000 UTC m=+988.921299111" lastFinishedPulling="2025-09-30 19:47:59.154769661 +0000 UTC m=+1008.775703138" observedRunningTime="2025-09-30 19:48:00.056473713 +0000 UTC m=+1009.677407200" watchObservedRunningTime="2025-09-30 19:48:00.068601141 +0000 UTC m=+1009.689534648" Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.091860 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" podStartSLOduration=3.087711924 podStartE2EDuration="23.091839251s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.139620135 +0000 UTC m=+988.760553612" lastFinishedPulling="2025-09-30 19:47:59.143747462 +0000 UTC m=+1008.764680939" observedRunningTime="2025-09-30 19:48:00.085279109 +0000 UTC m=+1009.706212626" watchObservedRunningTime="2025-09-30 19:48:00.091839251 +0000 UTC m=+1009.712772738" Sep 30 19:48:00 crc kubenswrapper[4756]: I0930 19:48:00.114917 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" podStartSLOduration=3.269756442 podStartE2EDuration="23.114890596s" podCreationTimestamp="2025-09-30 19:47:37 +0000 UTC" firstStartedPulling="2025-09-30 19:47:39.291009548 +0000 UTC m=+988.911943025" lastFinishedPulling="2025-09-30 19:47:59.136143692 +0000 UTC m=+1008.757077179" observedRunningTime="2025-09-30 19:48:00.106872616 +0000 UTC m=+1009.727806133" watchObservedRunningTime="2025-09-30 19:48:00.114890596 +0000 UTC m=+1009.735824123" Sep 30 19:48:07 crc kubenswrapper[4756]: I0930 19:48:07.907411 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-k5ccf" Sep 30 19:48:07 crc kubenswrapper[4756]: I0930 19:48:07.985276 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-54d4j" Sep 30 19:48:08 crc kubenswrapper[4756]: I0930 19:48:08.037894 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nvz55" Sep 30 19:48:08 crc kubenswrapper[4756]: I0930 19:48:08.106619 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-85777745bb-gfsx6" Sep 30 19:48:08 crc kubenswrapper[4756]: I0930 19:48:08.140347 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:48:08 crc kubenswrapper[4756]: I0930 19:48:08.143563 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-p4prj" Sep 30 19:48:08 crc kubenswrapper[4756]: I0930 19:48:08.524958 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8c6lblw" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.289971 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.291807 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.297933 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.298241 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-px7pf" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.298886 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.301888 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.315017 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.327169 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.327211 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x84j\" (UniqueName: \"kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.359661 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.362439 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.366134 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.372070 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.429990 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.430080 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x84j\" (UniqueName: \"kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.430104 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjvtk\" (UniqueName: \"kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.430161 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.430197 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.431022 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.472356 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x84j\" (UniqueName: \"kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j\") pod \"dnsmasq-dns-675f4bcbfc-fz4nk\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.532088 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjvtk\" (UniqueName: \"kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc 
kubenswrapper[4756]: I0930 19:48:24.532198 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.532249 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.533058 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.533656 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.552699 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjvtk\" (UniqueName: \"kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk\") pod \"dnsmasq-dns-78dd6ddcc-wwb2p\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.616237 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:24 crc kubenswrapper[4756]: I0930 19:48:24.686822 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:25 crc kubenswrapper[4756]: I0930 19:48:25.018474 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:25 crc kubenswrapper[4756]: W0930 19:48:25.024943 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ca10d7c_3f4f_44e3_b0c3_95e9e20929b2.slice/crio-f87d0450323ea77df80b04495fe9ee2c5aa62335746039b3d4ed38ccaf306dde WatchSource:0}: Error finding container f87d0450323ea77df80b04495fe9ee2c5aa62335746039b3d4ed38ccaf306dde: Status 404 returned error can't find the container with id f87d0450323ea77df80b04495fe9ee2c5aa62335746039b3d4ed38ccaf306dde Sep 30 19:48:25 crc kubenswrapper[4756]: I0930 19:48:25.026778 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 19:48:25 crc kubenswrapper[4756]: I0930 19:48:25.115686 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:25 crc kubenswrapper[4756]: I0930 19:48:25.250190 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" event={"ID":"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2","Type":"ContainerStarted","Data":"f87d0450323ea77df80b04495fe9ee2c5aa62335746039b3d4ed38ccaf306dde"} Sep 30 19:48:25 crc kubenswrapper[4756]: I0930 19:48:25.252632 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" event={"ID":"5821e392-3671-42cb-ba62-65d1921a6836","Type":"ContainerStarted","Data":"c658e5beec386f67eb8fb934ba9be6e1c99e8c2b5432f7bc47a56ce0a877e0c5"} Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.066958 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.088470 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.089927 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.102343 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.170252 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.170330 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.170433 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggv4v\" (UniqueName: \"kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.271413 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.271492 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggv4v\" (UniqueName: \"kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.271534 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.272361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.272374 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.293973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggv4v\" (UniqueName: 
\"kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v\") pod \"dnsmasq-dns-666b6646f7-wz57l\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.317835 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.344040 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.345130 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.380093 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.380151 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.380183 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxrxx\" (UniqueName: \"kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.393102 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"] Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.417825 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.481389 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxrxx\" (UniqueName: \"kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.481787 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.481820 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.484806 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.485203 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.506122 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxrxx\" (UniqueName: \"kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx\") pod \"dnsmasq-dns-57d769cc4f-qx9p6\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") " pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.663309 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:27 crc kubenswrapper[4756]: I0930 19:48:27.909059 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:27 crc kubenswrapper[4756]: W0930 19:48:27.917502 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61d939ce_b9ae_456e_949f_ebf03ff09847.slice/crio-c60870c448882a05569ada145951e2c74edb0e16173fb053b804d2a292ece150 WatchSource:0}: Error finding container c60870c448882a05569ada145951e2c74edb0e16173fb053b804d2a292ece150: Status 404 returned error can't find the container with id c60870c448882a05569ada145951e2c74edb0e16173fb053b804d2a292ece150 Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.124489 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"] Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.216133 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.217990 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.222372 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.222390 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.222469 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.222593 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.222637 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.223194 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2qsvc" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.223306 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.227576 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.295755 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" event={"ID":"61d939ce-b9ae-456e-949f-ebf03ff09847","Type":"ContainerStarted","Data":"c60870c448882a05569ada145951e2c74edb0e16173fb053b804d2a292ece150"} Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297701 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297733 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod 
\"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297749 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297800 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297829 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297875 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297902 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297935 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297952 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297971 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.297991 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2n7d\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d\") pod \"rabbitmq-server-0\" (UID: 
\"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399493 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399559 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399578 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2n7d\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399619 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399642 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399655 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399700 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399723 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399750 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.399769 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.400922 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.401826 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.401847 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.401991 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.402054 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.402311 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.406985 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.407040 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.415862 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.418253 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2n7d\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.420838 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.421125 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.514301 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.515806 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.519595 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.520721 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.521167 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-r9cv8" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.521676 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.521844 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.522038 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.522112 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.530617 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.547465 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.603444 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.603617 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.603711 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.603806 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.603912 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604032 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604128 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkdbg\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604230 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604336 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604445 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.604689 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705423 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705462 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705477 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705502 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705530 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705545 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705565 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 
19:48:28.705598 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705624 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705641 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkdbg\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705668 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.705790 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.706297 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.706636 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.706741 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.707033 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.708154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.711014 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.711359 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.714837 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.720984 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.724276 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.725202 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkdbg\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:28 crc kubenswrapper[4756]: I0930 19:48:28.894122 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.184957 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.186833 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.192192 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.209119 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.209517 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.209798 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-22xvd" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.209992 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.230778 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.215573 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.268099 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.269746 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.273568 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.273572 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.273779 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-hxt2w" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.274170 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.274468 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.317828 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" event={"ID":"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d","Type":"ContainerStarted","Data":"e759be9f0432593d221a87b8172c5baef36c48e0e67b1ceb5e56ff3532d01081"} Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359190 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359270 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 
19:48:31.359293 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359317 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359333 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359348 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zctjc\" (UniqueName: \"kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359377 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359425 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.359450 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.460812 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461075 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc 
kubenswrapper[4756]: I0930 19:48:31.461178 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461255 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42dgk\" (UniqueName: \"kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461329 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461430 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461529 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461610 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461701 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461813 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461889 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.461958 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zctjc\" (UniqueName: \"kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462030 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462101 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462172 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462250 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462328 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.463241 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.463284 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.462730 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.463782 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.464342 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.464944 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.482928 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.490075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.490283 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.501467 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.510661 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.515521 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.515703 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.522681 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-84xh9" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.534455 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.560935 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zctjc\" (UniqueName: \"kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566192 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566388 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566512 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566608 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566685 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566780 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566849 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42dgk\" (UniqueName: 
\"kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.566935 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.567035 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.568140 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.568367 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.573704 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.574093 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.574357 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.575367 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.584103 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: 
I0930 19:48:31.585640 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.585644 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.605363 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.606995 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42dgk\" (UniqueName: \"kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk\") pod \"openstack-galera-0\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " pod="openstack/openstack-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.669350 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.669410 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.669427 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.669451 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjhdm\" (UniqueName: \"kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.669492 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.770537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config\") pod \"memcached-0\" (UID: 
\"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.770854 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.770871 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.770892 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjhdm\" (UniqueName: \"kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.770917 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.771824 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.772557 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.773842 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.775115 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.786364 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjhdm\" (UniqueName: \"kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm\") pod \"memcached-0\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.834740 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.885298 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 19:48:31 crc kubenswrapper[4756]: I0930 19:48:31.894699 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.096862 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.097850 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.099776 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jbcpn" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.102148 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.203533 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4rsd\" (UniqueName: \"kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd\") pod \"kube-state-metrics-0\" (UID: \"718df58f-c1e5-4cab-9fb5-28a9303c6787\") " pod="openstack/kube-state-metrics-0" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.304652 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4rsd\" (UniqueName: \"kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd\") pod \"kube-state-metrics-0\" (UID: \"718df58f-c1e5-4cab-9fb5-28a9303c6787\") " pod="openstack/kube-state-metrics-0" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.335218 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4rsd\" (UniqueName: \"kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd\") pod \"kube-state-metrics-0\" (UID: \"718df58f-c1e5-4cab-9fb5-28a9303c6787\") " pod="openstack/kube-state-metrics-0" Sep 30 19:48:33 crc kubenswrapper[4756]: I0930 19:48:33.447431 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.166879 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.168353 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.176596 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.178609 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-gb64d" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.178879 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.179471 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.217593 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.219709 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.226878 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364652 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364714 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364742 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364798 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl74v\" (UniqueName: \"kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364821 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364852 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " 
pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364868 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364882 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364948 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmxc7\" (UniqueName: \"kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364964 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364980 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.364996 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.365020 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466690 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl74v\" (UniqueName: \"kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466739 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 
30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466783 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466801 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466817 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466834 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmxc7\" (UniqueName: \"kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466849 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466868 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466885 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466907 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466933 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466963 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.466984 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.467487 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.467546 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.467669 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.467796 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.468270 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.468335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.468335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.469488 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.473178 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.479610 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.482653 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.485485 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl74v\" (UniqueName: \"kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v\") pod \"ovn-controller-kfdjm\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.488588 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmxc7\" (UniqueName: \"kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7\") pod \"ovn-controller-ovs-kmbtz\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.489248 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:37 crc kubenswrapper[4756]: I0930 19:48:37.543192 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:38 crc kubenswrapper[4756]: I0930 19:48:38.815865 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:48:38 crc kubenswrapper[4756]: I0930 19:48:38.865743 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:48:39 crc kubenswrapper[4756]: W0930 19:48:39.387660 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3b1ef6d_6617_4d15_8709_a7623e75faa4.slice/crio-ef3b11605b90df6591e1850d9530a4e2bd8e4b06f92d8da767ef0f1d015678c9 WatchSource:0}: Error finding container ef3b11605b90df6591e1850d9530a4e2bd8e4b06f92d8da767ef0f1d015678c9: Status 404 returned error can't find the container with id ef3b11605b90df6591e1850d9530a4e2bd8e4b06f92d8da767ef0f1d015678c9 Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.388013 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.390061 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.397588 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.397870 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.397886 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.397998 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.398084 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xkkgk" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.406832 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerStarted","Data":"5087335d4f38bbc0625718888a0e022f4788c689a1bc5a3b239ad5a0644a309e"} Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.413244 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.427324 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.427487 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8x84j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-fz4nk_openstack(5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.428729 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" podUID="5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2" Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.482463 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.482708 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zjvtk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-wwb2p_openstack(5821e392-3671-42cb-ba62-65d1921a6836): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 19:48:39 crc kubenswrapper[4756]: E0930 19:48:39.483892 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" podUID="5821e392-3671-42cb-ba62-65d1921a6836" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503464 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503502 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503537 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503557 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj58x\" (UniqueName: \"kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503598 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503616 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503637 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.503680 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.604781 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.604964 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605001 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj58x\" (UniqueName: \"kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605021 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605064 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir\") pod 
\"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605083 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605107 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605151 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.605938 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.609057 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.609268 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.610091 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.617200 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.620518 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.630972 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.639187 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj58x\" (UniqueName: \"kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.668166 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.859356 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:39 crc kubenswrapper[4756]: I0930 19:48:39.942267 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:48:39 crc kubenswrapper[4756]: W0930 19:48:39.954510 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05ea7072_b87a_4cf4_8f86_f32e5836951a.slice/crio-0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e WatchSource:0}: Error finding container 0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e: Status 404 returned error can't find the container with id 0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.007446 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: W0930 19:48:40.012777 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod718df58f_c1e5_4cab_9fb5_28a9303c6787.slice/crio-0f5c73b16a7f8fbe4aad09bbabe569e0b17c89990a5295f48465224c447c3907 WatchSource:0}: Error finding container 0f5c73b16a7f8fbe4aad09bbabe569e0b17c89990a5295f48465224c447c3907: Status 404 returned error can't find the container with id 0f5c73b16a7f8fbe4aad09bbabe569e0b17c89990a5295f48465224c447c3907 Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.014766 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.021537 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.030867 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.067339 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.074125 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.083698 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.085767 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.085975 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.085863 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.086307 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-wkhzg" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.180044 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:48:40 crc kubenswrapper[4756]: W0930 19:48:40.180580 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf5c32f2_40b7_4c8f_ab7b_58b6fa3200ed.slice/crio-3a56a47b8c0530230e171fc2e4a85c823b8e1fe358dbf42bcc9b47456600499b WatchSource:0}: Error finding container 3a56a47b8c0530230e171fc2e4a85c823b8e1fe358dbf42bcc9b47456600499b: Status 404 returned error can't find the container with id 3a56a47b8c0530230e171fc2e4a85c823b8e1fe358dbf42bcc9b47456600499b Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.212850 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213118 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213167 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213216 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8hkd\" (UniqueName: \"kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213257 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 
19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213351 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213494 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.213562 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315314 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315385 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315432 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315469 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315507 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315524 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315543 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8hkd\" (UniqueName: 
\"kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.315562 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.316427 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.318413 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.319263 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.319615 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.323411 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.338662 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8hkd\" (UniqueName: \"kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.352294 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.353522 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.355426 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.404336 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.419431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerStarted","Data":"ef3b11605b90df6591e1850d9530a4e2bd8e4b06f92d8da767ef0f1d015678c9"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.426110 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerStarted","Data":"3a56a47b8c0530230e171fc2e4a85c823b8e1fe358dbf42bcc9b47456600499b"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.428009 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm" event={"ID":"9996bc15-3d12-4aa1-a22e-d11726f6c4d8","Type":"ContainerStarted","Data":"e9ee3bd8f7d75ab35b946607378c7854f04c393b1d7b5eca7978c162d02e40c6"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.429717 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"718df58f-c1e5-4cab-9fb5-28a9303c6787","Type":"ContainerStarted","Data":"0f5c73b16a7f8fbe4aad09bbabe569e0b17c89990a5295f48465224c447c3907"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.431825 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerStarted","Data":"a50a0a986633a5f1ab92b36d62bd526748b09fae436384eae10f2977e3194410"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.433853 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerStarted","Data":"0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.436242 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5d8f3245-22a4-45a4-bbc4-33e1a078f92b","Type":"ContainerStarted","Data":"dcf469d5d377557749ef58864624ab85356b6cc6830ddb64c8af3f562e9d0ffe"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.438032 4756 generic.go:334] "Generic (PLEG): container finished" podID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerID="f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad" exitCode=0 Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.438101 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" event={"ID":"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d","Type":"ContainerDied","Data":"f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.440576 4756 generic.go:334] "Generic (PLEG): container finished" podID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerID="048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00" exitCode=0 Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.440614 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" 
event={"ID":"61d939ce-b9ae-456e-949f-ebf03ff09847","Type":"ContainerDied","Data":"048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00"} Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.486508 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.795555 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.910446 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.926072 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x84j\" (UniqueName: \"kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j\") pod \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.926471 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config\") pod \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\" (UID: \"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2\") " Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.927130 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config" (OuterVolumeSpecName: "config") pod "5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2" (UID: "5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:40 crc kubenswrapper[4756]: I0930 19:48:40.930044 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j" (OuterVolumeSpecName: "kube-api-access-8x84j") pod "5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2" (UID: "5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2"). InnerVolumeSpecName "kube-api-access-8x84j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.028470 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc\") pod \"5821e392-3671-42cb-ba62-65d1921a6836\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.028517 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config\") pod \"5821e392-3671-42cb-ba62-65d1921a6836\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.028547 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjvtk\" (UniqueName: \"kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk\") pod \"5821e392-3671-42cb-ba62-65d1921a6836\" (UID: \"5821e392-3671-42cb-ba62-65d1921a6836\") " Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.028923 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.028940 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x84j\" (UniqueName: \"kubernetes.io/projected/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2-kube-api-access-8x84j\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.029015 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5821e392-3671-42cb-ba62-65d1921a6836" (UID: "5821e392-3671-42cb-ba62-65d1921a6836"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.029034 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config" (OuterVolumeSpecName: "config") pod "5821e392-3671-42cb-ba62-65d1921a6836" (UID: "5821e392-3671-42cb-ba62-65d1921a6836"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.032598 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk" (OuterVolumeSpecName: "kube-api-access-zjvtk") pod "5821e392-3671-42cb-ba62-65d1921a6836" (UID: "5821e392-3671-42cb-ba62-65d1921a6836"). InnerVolumeSpecName "kube-api-access-zjvtk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.102559 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.130053 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.130085 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5821e392-3671-42cb-ba62-65d1921a6836-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.130121 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjvtk\" (UniqueName: \"kubernetes.io/projected/5821e392-3671-42cb-ba62-65d1921a6836-kube-api-access-zjvtk\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.484081 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" event={"ID":"5821e392-3671-42cb-ba62-65d1921a6836","Type":"ContainerDied","Data":"c658e5beec386f67eb8fb934ba9be6e1c99e8c2b5432f7bc47a56ce0a877e0c5"} Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.484257 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wwb2p" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.487102 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" event={"ID":"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d","Type":"ContainerStarted","Data":"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"} Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.487281 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.489949 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" event={"ID":"61d939ce-b9ae-456e-949f-ebf03ff09847","Type":"ContainerStarted","Data":"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae"} Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.490314 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.491163 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerStarted","Data":"bc44bc513fdd73eee4a5df8aee511b36430a81186729183142b396ffd487277b"} Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.494169 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" event={"ID":"5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2","Type":"ContainerDied","Data":"f87d0450323ea77df80b04495fe9ee2c5aa62335746039b3d4ed38ccaf306dde"} Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.494285 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-fz4nk" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.526079 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.546871 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wwb2p"] Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.548516 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" podStartSLOduration=6.168144865 podStartE2EDuration="14.548498257s" podCreationTimestamp="2025-09-30 19:48:27 +0000 UTC" firstStartedPulling="2025-09-30 19:48:31.137421833 +0000 UTC m=+1040.758355310" lastFinishedPulling="2025-09-30 19:48:39.517775225 +0000 UTC m=+1049.138708702" observedRunningTime="2025-09-30 19:48:41.539498712 +0000 UTC m=+1051.160432179" watchObservedRunningTime="2025-09-30 19:48:41.548498257 +0000 UTC m=+1051.169431734" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.559935 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" podStartSLOduration=2.980585327 podStartE2EDuration="14.559920186s" podCreationTimestamp="2025-09-30 19:48:27 +0000 UTC" firstStartedPulling="2025-09-30 19:48:27.919355277 +0000 UTC m=+1037.540288754" lastFinishedPulling="2025-09-30 19:48:39.498690136 +0000 UTC m=+1049.119623613" observedRunningTime="2025-09-30 19:48:41.554385891 +0000 UTC m=+1051.175319378" watchObservedRunningTime="2025-09-30 19:48:41.559920186 +0000 UTC m=+1051.180853663" Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.592977 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:41 crc kubenswrapper[4756]: W0930 19:48:41.595256 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfe78e50_0ef5_42f5_b989_39beebdd0d7c.slice/crio-5807829cd1ab394c7929eb9fd62e7781b847af8672e24a208669870f294fc279 WatchSource:0}: Error finding container 5807829cd1ab394c7929eb9fd62e7781b847af8672e24a208669870f294fc279: Status 404 returned error can't find the container with id 5807829cd1ab394c7929eb9fd62e7781b847af8672e24a208669870f294fc279 Sep 30 19:48:41 crc kubenswrapper[4756]: I0930 19:48:41.598074 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-fz4nk"] Sep 30 19:48:42 crc kubenswrapper[4756]: I0930 19:48:42.507386 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfe78e50-0ef5-42f5-b989-39beebdd0d7c","Type":"ContainerStarted","Data":"5807829cd1ab394c7929eb9fd62e7781b847af8672e24a208669870f294fc279"} Sep 30 19:48:43 crc kubenswrapper[4756]: I0930 19:48:43.125242 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5821e392-3671-42cb-ba62-65d1921a6836" path="/var/lib/kubelet/pods/5821e392-3671-42cb-ba62-65d1921a6836/volumes" Sep 30 19:48:43 crc kubenswrapper[4756]: I0930 19:48:43.125755 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2" path="/var/lib/kubelet/pods/5ca10d7c-3f4f-44e3-b0c3-95e9e20929b2/volumes" Sep 30 19:48:47 crc kubenswrapper[4756]: I0930 19:48:47.421509 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:47 crc kubenswrapper[4756]: I0930 
19:48:47.665006 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" Sep 30 19:48:47 crc kubenswrapper[4756]: I0930 19:48:47.733766 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:47 crc kubenswrapper[4756]: I0930 19:48:47.734091 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="dnsmasq-dns" containerID="cri-o://a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae" gracePeriod=10 Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.447084 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.561110 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5d8f3245-22a4-45a4-bbc4-33e1a078f92b","Type":"ContainerStarted","Data":"4743fe1d3afca0374e30323f82a3b45ead65b0e37c7b34d43281655e0227ef30"} Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.562034 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.562972 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc\") pod \"61d939ce-b9ae-456e-949f-ebf03ff09847\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.563057 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config\") pod \"61d939ce-b9ae-456e-949f-ebf03ff09847\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.563081 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggv4v\" (UniqueName: \"kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v\") pod \"61d939ce-b9ae-456e-949f-ebf03ff09847\" (UID: \"61d939ce-b9ae-456e-949f-ebf03ff09847\") " Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.573518 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v" (OuterVolumeSpecName: "kube-api-access-ggv4v") pod "61d939ce-b9ae-456e-949f-ebf03ff09847" (UID: "61d939ce-b9ae-456e-949f-ebf03ff09847"). InnerVolumeSpecName "kube-api-access-ggv4v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.580376 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=10.546592218 podStartE2EDuration="17.58035864s" podCreationTimestamp="2025-09-30 19:48:31 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.027102339 +0000 UTC m=+1049.648035816" lastFinishedPulling="2025-09-30 19:48:47.060868761 +0000 UTC m=+1056.681802238" observedRunningTime="2025-09-30 19:48:48.578096341 +0000 UTC m=+1058.199029838" watchObservedRunningTime="2025-09-30 19:48:48.58035864 +0000 UTC m=+1058.201292117" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.582610 4756 generic.go:334] "Generic (PLEG): container finished" podID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerID="a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae" exitCode=0 Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.593444 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" event={"ID":"61d939ce-b9ae-456e-949f-ebf03ff09847","Type":"ContainerDied","Data":"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae"} Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.593504 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" event={"ID":"61d939ce-b9ae-456e-949f-ebf03ff09847","Type":"ContainerDied","Data":"c60870c448882a05569ada145951e2c74edb0e16173fb053b804d2a292ece150"} Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.593595 4756 scope.go:117] "RemoveContainer" containerID="a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.593709 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wz57l" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.633959 4756 scope.go:117] "RemoveContainer" containerID="048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.667291 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggv4v\" (UniqueName: \"kubernetes.io/projected/61d939ce-b9ae-456e-949f-ebf03ff09847-kube-api-access-ggv4v\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.677040 4756 scope.go:117] "RemoveContainer" containerID="a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae" Sep 30 19:48:48 crc kubenswrapper[4756]: E0930 19:48:48.677345 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae\": container with ID starting with a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae not found: ID does not exist" containerID="a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.677374 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae"} err="failed to get container status \"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae\": rpc error: code = NotFound desc = could not find container \"a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae\": container with ID starting with a700766bcf18761b57158ff7c458a323fbe44f7a5d15d5c9e54531cb5b1489ae not found: ID does not exist" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.677419 4756 scope.go:117] "RemoveContainer" containerID="048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00" Sep 30 19:48:48 crc kubenswrapper[4756]: E0930 19:48:48.677682 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00\": container with ID starting with 048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00 not found: ID does not exist" containerID="048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.677730 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00"} err="failed to get container status \"048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00\": rpc error: code = NotFound desc = could not find container \"048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00\": container with ID starting with 048a054000d30a821e6b8f1811c4d16dc18ca06ab88a0e5a6f444962c3bb3a00 not found: ID does not exist" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.810274 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config" (OuterVolumeSpecName: "config") pod "61d939ce-b9ae-456e-949f-ebf03ff09847" (UID: "61d939ce-b9ae-456e-949f-ebf03ff09847"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.821188 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "61d939ce-b9ae-456e-949f-ebf03ff09847" (UID: "61d939ce-b9ae-456e-949f-ebf03ff09847"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.869468 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.869502 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61d939ce-b9ae-456e-949f-ebf03ff09847-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.921354 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:48:48 crc kubenswrapper[4756]: E0930 19:48:48.921803 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="init" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.921825 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="init" Sep 30 19:48:48 crc kubenswrapper[4756]: E0930 19:48:48.921890 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="dnsmasq-dns" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.921902 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="dnsmasq-dns" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.922132 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" containerName="dnsmasq-dns" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.923054 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.928644 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.938160 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.947352 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:48 crc kubenswrapper[4756]: I0930 19:48:48.953901 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wz57l"] Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072511 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww2jq\" (UniqueName: \"kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072552 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072686 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072750 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072886 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.072978 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.123573 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d939ce-b9ae-456e-949f-ebf03ff09847" path="/var/lib/kubelet/pods/61d939ce-b9ae-456e-949f-ebf03ff09847/volumes" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.161067 4756 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-7fd796d7df-shtwv"] Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.162376 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.164680 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174178 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174270 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww2jq\" (UniqueName: \"kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174367 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174449 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174603 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.174748 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.175493 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.175558 4756 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-shtwv"] Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.176096 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.251451 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.252268 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.277343 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.277686 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.277878 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smmm8\" (UniqueName: \"kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.278002 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.352538 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww2jq\" (UniqueName: \"kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq\") pod \"ovn-controller-metrics-tnm2n\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.380523 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: 
\"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.380609 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.380644 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smmm8\" (UniqueName: \"kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.380683 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.381820 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.382632 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.382968 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.398378 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-shtwv"] Sep 30 19:48:49 crc kubenswrapper[4756]: E0930 19:48:49.399019 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-smmm8], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" podUID="88307fdb-545f-4b08-9030-c501648884a4" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.406347 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smmm8\" (UniqueName: \"kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8\") pod \"dnsmasq-dns-7fd796d7df-shtwv\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.438582 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.440175 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.444695 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.456073 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.571286 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.584220 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.584311 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.584340 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r99k4\" (UniqueName: \"kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.584362 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.584430 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.628287 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerID="72c557f5ab2787a56fb20fd470b72d16e6f45d14b19c838d4ca4877b21ee5f12" exitCode=0 Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.628353 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerDied","Data":"72c557f5ab2787a56fb20fd470b72d16e6f45d14b19c838d4ca4877b21ee5f12"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.632194 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerStarted","Data":"8072ad59b49efc7ab3149a130a357ae4c8ebf10c90e906b83a1c2c7c39a26605"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.633706 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerStarted","Data":"3b361be86fa33c2d42005a3a29c1306df75cc2f04957244da9accd2d1ab215f9"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.635776 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfe78e50-0ef5-42f5-b989-39beebdd0d7c","Type":"ContainerStarted","Data":"b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.637639 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerStarted","Data":"f505cfd159ce82f4bb52b76159f6e70bf9d14da0427ec40a08f30173d7ae97d4"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.639375 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerStarted","Data":"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.642303 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm" event={"ID":"9996bc15-3d12-4aa1-a22e-d11726f6c4d8","Type":"ContainerStarted","Data":"b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.642438 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-kfdjm" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.643485 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"718df58f-c1e5-4cab-9fb5-28a9303c6787","Type":"ContainerStarted","Data":"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.644119 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.660273 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerStarted","Data":"80a97405b4312c52e3a31eee94edf96e1e860adff64ef35aedcd06d9bb1f2983"} Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.662179 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.670213 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=8.292293122 podStartE2EDuration="16.670196279s" podCreationTimestamp="2025-09-30 19:48:33 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.019769807 +0000 UTC m=+1049.640703294" lastFinishedPulling="2025-09-30 19:48:48.397672974 +0000 UTC m=+1058.018606451" observedRunningTime="2025-09-30 19:48:49.664467709 +0000 UTC m=+1059.285401196" watchObservedRunningTime="2025-09-30 19:48:49.670196279 +0000 UTC m=+1059.291129756" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.677783 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.685585 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.685644 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.685675 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r99k4\" (UniqueName: \"kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.685712 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.685750 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.686659 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.687189 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.687722 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.688278 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 
19:48:49.707491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r99k4\" (UniqueName: \"kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4\") pod \"dnsmasq-dns-86db49b7ff-wd9cd\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.747878 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-kfdjm" podStartSLOduration=5.182262645 podStartE2EDuration="12.747858679s" podCreationTimestamp="2025-09-30 19:48:37 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.044052842 +0000 UTC m=+1049.664986309" lastFinishedPulling="2025-09-30 19:48:47.609648866 +0000 UTC m=+1057.230582343" observedRunningTime="2025-09-30 19:48:49.739030238 +0000 UTC m=+1059.359963725" watchObservedRunningTime="2025-09-30 19:48:49.747858679 +0000 UTC m=+1059.368792156" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.775004 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.786585 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smmm8\" (UniqueName: \"kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8\") pod \"88307fdb-545f-4b08-9030-c501648884a4\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.786631 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb\") pod \"88307fdb-545f-4b08-9030-c501648884a4\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.786653 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc\") pod \"88307fdb-545f-4b08-9030-c501648884a4\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.786752 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config\") pod \"88307fdb-545f-4b08-9030-c501648884a4\" (UID: \"88307fdb-545f-4b08-9030-c501648884a4\") " Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.788033 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "88307fdb-545f-4b08-9030-c501648884a4" (UID: "88307fdb-545f-4b08-9030-c501648884a4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.788317 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88307fdb-545f-4b08-9030-c501648884a4" (UID: "88307fdb-545f-4b08-9030-c501648884a4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.788618 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config" (OuterVolumeSpecName: "config") pod "88307fdb-545f-4b08-9030-c501648884a4" (UID: "88307fdb-545f-4b08-9030-c501648884a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.800334 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8" (OuterVolumeSpecName: "kube-api-access-smmm8") pod "88307fdb-545f-4b08-9030-c501648884a4" (UID: "88307fdb-545f-4b08-9030-c501648884a4"). InnerVolumeSpecName "kube-api-access-smmm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.889289 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.889635 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smmm8\" (UniqueName: \"kubernetes.io/projected/88307fdb-545f-4b08-9030-c501648884a4-kube-api-access-smmm8\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.889649 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:49 crc kubenswrapper[4756]: I0930 19:48:49.889661 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88307fdb-545f-4b08-9030-c501648884a4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.053540 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:48:50 crc kubenswrapper[4756]: W0930 19:48:50.064226 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e9466ec_a908_431a_9c47_fda720095cd8.slice/crio-168a227c3f7e1651059e907990bc1c808a02888d89b0330c43178f26f6d733e9 WatchSource:0}: Error finding container 168a227c3f7e1651059e907990bc1c808a02888d89b0330c43178f26f6d733e9: Status 404 returned error can't find the container with id 168a227c3f7e1651059e907990bc1c808a02888d89b0330c43178f26f6d733e9 Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.212285 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:48:50 crc kubenswrapper[4756]: W0930 19:48:50.216698 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9db1a87d_2ea3_4ec9_bf4d_a72708411f6e.slice/crio-84d8d8410e09fe3246e3d5d3ce05c109ca763ad755e5148c2075b4bc78b42ece WatchSource:0}: Error finding container 84d8d8410e09fe3246e3d5d3ce05c109ca763ad755e5148c2075b4bc78b42ece: Status 404 returned error can't find the container with id 84d8d8410e09fe3246e3d5d3ce05c109ca763ad755e5148c2075b4bc78b42ece Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.677577 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerStarted","Data":"291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea"} Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.677901 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerStarted","Data":"223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6"} Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.677917 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.677930 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.679987 4756 generic.go:334] "Generic (PLEG): container finished" podID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerID="19075cdc31fd062688f54d204c9a29469ec5be5e49780dfca17853ecef0396aa" exitCode=0 Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.680063 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" event={"ID":"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e","Type":"ContainerDied","Data":"19075cdc31fd062688f54d204c9a29469ec5be5e49780dfca17853ecef0396aa"} Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.680137 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" event={"ID":"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e","Type":"ContainerStarted","Data":"84d8d8410e09fe3246e3d5d3ce05c109ca763ad755e5148c2075b4bc78b42ece"} Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.682315 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tnm2n" event={"ID":"9e9466ec-a908-431a-9c47-fda720095cd8","Type":"ContainerStarted","Data":"168a227c3f7e1651059e907990bc1c808a02888d89b0330c43178f26f6d733e9"} Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.682375 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-shtwv" Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.707118 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-kmbtz" podStartSLOduration=6.380190408 podStartE2EDuration="13.707099024s" podCreationTimestamp="2025-09-30 19:48:37 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.182790508 +0000 UTC m=+1049.803723985" lastFinishedPulling="2025-09-30 19:48:47.509699114 +0000 UTC m=+1057.130632601" observedRunningTime="2025-09-30 19:48:50.702548245 +0000 UTC m=+1060.323481722" watchObservedRunningTime="2025-09-30 19:48:50.707099024 +0000 UTC m=+1060.328032501" Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.757228 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-shtwv"] Sep 30 19:48:50 crc kubenswrapper[4756]: I0930 19:48:50.761951 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-shtwv"] Sep 30 19:48:51 crc kubenswrapper[4756]: I0930 19:48:51.126321 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88307fdb-545f-4b08-9030-c501648884a4" path="/var/lib/kubelet/pods/88307fdb-545f-4b08-9030-c501648884a4/volumes" Sep 30 19:48:51 crc kubenswrapper[4756]: I0930 19:48:51.693805 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" event={"ID":"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e","Type":"ContainerStarted","Data":"60bfb6c548623904a95282fcfec56d46ecba4c66bb2fa6b0ef8f81c3ba789b54"} Sep 30 19:48:51 crc kubenswrapper[4756]: I0930 19:48:51.713682 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" podStartSLOduration=2.713666175 podStartE2EDuration="2.713666175s" podCreationTimestamp="2025-09-30 19:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:51.707858914 +0000 UTC m=+1061.328792391" watchObservedRunningTime="2025-09-30 19:48:51.713666175 +0000 UTC m=+1061.334599652" Sep 30 19:48:52 crc kubenswrapper[4756]: I0930 19:48:52.704408 4756 generic.go:334] "Generic (PLEG): container finished" podID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerID="8072ad59b49efc7ab3149a130a357ae4c8ebf10c90e906b83a1c2c7c39a26605" exitCode=0 Sep 30 19:48:52 crc kubenswrapper[4756]: I0930 19:48:52.704740 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerDied","Data":"8072ad59b49efc7ab3149a130a357ae4c8ebf10c90e906b83a1c2c7c39a26605"} Sep 30 19:48:52 crc kubenswrapper[4756]: I0930 19:48:52.709718 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerDied","Data":"f505cfd159ce82f4bb52b76159f6e70bf9d14da0427ec40a08f30173d7ae97d4"} Sep 30 19:48:52 crc kubenswrapper[4756]: I0930 19:48:52.711479 4756 generic.go:334] "Generic (PLEG): container finished" podID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerID="f505cfd159ce82f4bb52b76159f6e70bf9d14da0427ec40a08f30173d7ae97d4" exitCode=0 Sep 30 19:48:52 crc kubenswrapper[4756]: I0930 19:48:52.712016 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.454048 4756 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.725512 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerStarted","Data":"6767daa671c57644b127486af8258b0a0685ce6fa8c75b10d7e4ec93b1682e95"} Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.729349 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerStarted","Data":"366cf47beb3bf314873bbe2aee65a48d7de8c63bb8eb30ed71c2c3e6ba1b8b24"} Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.732020 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfe78e50-0ef5-42f5-b989-39beebdd0d7c","Type":"ContainerStarted","Data":"1e54723557bbe1ca80eed69c4f2a6744b98440dd8b61a4a9acaa14272039ec98"} Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.733682 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerStarted","Data":"c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e"} Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.735618 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tnm2n" event={"ID":"9e9466ec-a908-431a-9c47-fda720095cd8","Type":"ContainerStarted","Data":"a689bb821757f51e26e8f98c83247791b78940eeab2a2b87776e95ccb89ff5af"} Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.761865 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.647185018 podStartE2EDuration="15.761840834s" podCreationTimestamp="2025-09-30 19:48:38 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.519548931 +0000 UTC m=+1050.140482408" lastFinishedPulling="2025-09-30 19:48:52.634204757 +0000 UTC m=+1062.255138224" observedRunningTime="2025-09-30 19:48:53.74254072 +0000 UTC m=+1063.363474207" watchObservedRunningTime="2025-09-30 19:48:53.761840834 +0000 UTC m=+1063.382774321" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.772552 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-tnm2n" podStartSLOduration=3.18021541 podStartE2EDuration="5.772527833s" podCreationTimestamp="2025-09-30 19:48:48 +0000 UTC" firstStartedPulling="2025-09-30 19:48:50.066277232 +0000 UTC m=+1059.687210709" lastFinishedPulling="2025-09-30 19:48:52.658589655 +0000 UTC m=+1062.279523132" observedRunningTime="2025-09-30 19:48:53.763007615 +0000 UTC m=+1063.383941102" watchObservedRunningTime="2025-09-30 19:48:53.772527833 +0000 UTC m=+1063.393461320" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.815493 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=16.715243287 podStartE2EDuration="23.815476276s" podCreationTimestamp="2025-09-30 19:48:30 +0000 UTC" firstStartedPulling="2025-09-30 19:48:39.96057664 +0000 UTC m=+1049.581510117" lastFinishedPulling="2025-09-30 19:48:47.060809629 +0000 UTC m=+1056.681743106" observedRunningTime="2025-09-30 19:48:53.811585304 +0000 UTC m=+1063.432518781" watchObservedRunningTime="2025-09-30 19:48:53.815476276 +0000 UTC m=+1063.436409763" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.831309 4756 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=16.366371367 podStartE2EDuration="23.8312948s" podCreationTimestamp="2025-09-30 19:48:30 +0000 UTC" firstStartedPulling="2025-09-30 19:48:40.044035771 +0000 UTC m=+1049.664969248" lastFinishedPulling="2025-09-30 19:48:47.508959204 +0000 UTC m=+1057.129892681" observedRunningTime="2025-09-30 19:48:53.830838518 +0000 UTC m=+1063.451771995" watchObservedRunningTime="2025-09-30 19:48:53.8312948 +0000 UTC m=+1063.452228287" Sep 30 19:48:53 crc kubenswrapper[4756]: I0930 19:48:53.853335 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.836872765 podStartE2EDuration="14.853312695s" podCreationTimestamp="2025-09-30 19:48:39 +0000 UTC" firstStartedPulling="2025-09-30 19:48:41.601439291 +0000 UTC m=+1051.222372768" lastFinishedPulling="2025-09-30 19:48:52.617879221 +0000 UTC m=+1062.238812698" observedRunningTime="2025-09-30 19:48:53.84813177 +0000 UTC m=+1063.469065257" watchObservedRunningTime="2025-09-30 19:48:53.853312695 +0000 UTC m=+1063.474246172" Sep 30 19:48:54 crc kubenswrapper[4756]: I0930 19:48:54.859995 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:54 crc kubenswrapper[4756]: I0930 19:48:54.860073 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:54 crc kubenswrapper[4756]: I0930 19:48:54.920707 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:55 crc kubenswrapper[4756]: I0930 19:48:55.405513 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:55 crc kubenswrapper[4756]: I0930 19:48:55.405596 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:55 crc kubenswrapper[4756]: I0930 19:48:55.464813 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:55 crc kubenswrapper[4756]: I0930 19:48:55.816682 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 19:48:55 crc kubenswrapper[4756]: I0930 19:48:55.824975 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.087286 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.088823 4756 util.go:30] "No sandbox for pod can be found. 
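[Editor's aside] In the pod_startup_latency_tracker.go entries above, podStartE2EDuration spans pod creation to observed running, while podStartSLOduration appears to be that figure minus the time spent pulling images. A minimal Go check against the ovn-controller-ovs-kmbtz entry (all values are copied from the log; the subtraction is our inference about the metric, not kubelet source):

    package main

    import "fmt"

    func main() {
            // Monotonic offsets (the m=+... values) from the ovn-controller-ovs-kmbtz entry.
            firstStartedPulling := 1049.803723985
            lastFinishedPulling := 1057.130632601
            pulling := lastFinishedPulling - firstStartedPulling // ~7.326908616s spent pulling images

            podStartE2E := 13.707099024 // podStartE2EDuration from the same entry
            // Prints 6.380190408, matching podStartSLOduration in the log.
            fmt.Printf("podStartSLOduration ≈ %.9f\n", podStartE2E-pulling)
    }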
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.088823 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.096816 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.096840 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.097115 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.097270 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-xtmrf"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.112171 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222422 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvh5t\" (UniqueName: \"kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222481 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222645 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222747 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222843 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.222917 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.223025 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.324935 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325062 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325533 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325200 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvh5t\" (UniqueName: \"kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325604 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325634 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.325759 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.326253 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.326343 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.326709 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.332082 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.340130 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.340440 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.343354 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvh5t\" (UniqueName: \"kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t\") pod \"ovn-northd-0\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.469299 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.886340 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Sep 30 19:48:56 crc kubenswrapper[4756]: I0930 19:48:56.927764 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 19:48:57 crc kubenswrapper[4756]: I0930 19:48:57.772272 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerStarted","Data":"c3a68d5854b68b5f841c43337e1fbeecbe6820158c388a7e166ab069969d7462"}
Sep 30 19:48:59 crc kubenswrapper[4756]: I0930 19:48:59.777643 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd"
Sep 30 19:48:59 crc kubenswrapper[4756]: I0930 19:48:59.864755 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"]
Sep 30 19:48:59 crc kubenswrapper[4756]: I0930 19:48:59.865031 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="dnsmasq-dns" containerID="cri-o://fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e" gracePeriod=10
Sep 30 19:49:01 crc kubenswrapper[4756]: I0930 19:49:01.835800 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Sep 30 19:49:01 crc kubenswrapper[4756]: I0930 19:49:01.836154 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Sep 30 19:49:01 crc kubenswrapper[4756]: I0930 19:49:01.895913 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Sep 30 19:49:01 crc kubenswrapper[4756]: I0930 19:49:01.895997 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Sep 30 19:49:01 crc kubenswrapper[4756]: I0930 19:49:01.896183 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Sep 30 19:49:02 crc kubenswrapper[4756]: I0930 19:49:02.664258 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.103:5353: connect: connection refused"
Sep 30 19:49:02 crc kubenswrapper[4756]: I0930 19:49:02.889672 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.359792 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"]
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.361018 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.377932 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"]
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.465023 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.465088 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.465158 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.465178 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq79m\" (UniqueName: \"kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.465215 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.566417 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.566522 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.566545 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq79m\" (UniqueName: \"kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.566577 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.566620 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.567513 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.567552 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.567738 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.567831 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.608610 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq79m\" (UniqueName: \"kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m\") pod \"dnsmasq-dns-698758b865-nzmxq\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:03 crc kubenswrapper[4756]: I0930 19:49:03.676562 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nzmxq"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.533500 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.543362 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.546450 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.546728 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.546774 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.547191 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-t4vds"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.571559 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.582922 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.583013 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.583059 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.583095 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.583153 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5zm2\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.684579 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
"operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.684721 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.684760 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.684805 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5zm2\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: E0930 19:49:04.684878 4756 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 19:49:04 crc kubenswrapper[4756]: E0930 19:49:04.684923 4756 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 19:49:04 crc kubenswrapper[4756]: E0930 19:49:04.685001 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift podName:26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.184970634 +0000 UTC m=+1074.805904181 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift") pod "swift-storage-0" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f") : configmap "swift-ring-files" not found Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.685660 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.685910 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.686288 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.713012 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5zm2\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:04 crc kubenswrapper[4756]: I0930 19:49:04.749129 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.064047 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-qf8st"] Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.065369 4756 util.go:30] "No sandbox for pod can be found. 
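[Editor's aside] The etc-swift failure above is the first of four in this section; the later entries show durationBeforeRetry doubling (500ms at 19:49:04, 1s at 19:49:05, 2s at 19:49:06, 4s at 19:49:08) while the "swift-ring-files" ConfigMap is still missing. A minimal Go sketch of that schedule (the real logic lives in kubelet's nestedpendingoperations.go; the initial delay and doubling are taken from the log, the ceiling is an assumption):

    package main

    import (
            "fmt"
            "time"
    )

    func main() {
            backoff := 500 * time.Millisecond // initial delay observed in the log
            maxBackoff := 2 * time.Minute     // assumed ceiling, not taken from the log
            for attempt := 1; attempt <= 4; attempt++ {
                    fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, backoff)
                    backoff *= 2 // 500ms -> 1s -> 2s -> 4s, matching the four log entries
                    if backoff > maxBackoff {
                            backoff = maxBackoff
                    }
            }
    }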
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.065369 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.067825 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.068439 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.069081 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.076925 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-qf8st"]
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.090659 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.090710 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.090783 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.090902 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.090948 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.091107 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mscbg\" (UniqueName: \"kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.091184 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.192840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.192883 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.192908 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.192926 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.192972 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mscbg\" (UniqueName: \"kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193045 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193068 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193514 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: E0930 19:49:05.193646 4756 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 19:49:05 crc kubenswrapper[4756]: E0930 19:49:05.193665 4756 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 19:49:05 crc kubenswrapper[4756]: E0930 19:49:05.193706 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift podName:26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.193689862 +0000 UTC m=+1075.814623339 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift") pod "swift-storage-0" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f") : configmap "swift-ring-files" not found
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193870 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.193888 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.198960 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.198959 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.201504 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.211938 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mscbg\" (UniqueName: \"kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg\") pod \"swift-ring-rebalance-qf8st\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.438232 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-qf8st"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.565052 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.629636 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.638011 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.703116 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc\") pod \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") "
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.703511 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config\") pod \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") "
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.703672 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxrxx\" (UniqueName: \"kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx\") pod \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\" (UID: \"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d\") "
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.708998 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx" (OuterVolumeSpecName: "kube-api-access-lxrxx") pod "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" (UID: "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d"). InnerVolumeSpecName "kube-api-access-lxrxx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.763159 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config" (OuterVolumeSpecName: "config") pod "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" (UID: "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.791864 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" (UID: "0bc4b3cb-5f9f-4af2-8204-243ea2bd899d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.805255 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxrxx\" (UniqueName: \"kubernetes.io/projected/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-kube-api-access-lxrxx\") on node \"crc\" DevicePath \"\""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.805290 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.805299 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d-config\") on node \"crc\" DevicePath \"\""
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.843721 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"]
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.847767 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerStarted","Data":"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35"}
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.847796 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerStarted","Data":"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df"}
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.847830 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.860520 4756 generic.go:334] "Generic (PLEG): container finished" podID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerID="fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e" exitCode=0
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.860614 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" event={"ID":"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d","Type":"ContainerDied","Data":"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"}
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.860643 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6" event={"ID":"0bc4b3cb-5f9f-4af2-8204-243ea2bd899d","Type":"ContainerDied","Data":"e759be9f0432593d221a87b8172c5baef36c48e0e67b1ceb5e56ff3532d01081"}
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.860661 4756 scope.go:117] "RemoveContainer" containerID="fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.860676 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qx9p6"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.876632 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.495860992 podStartE2EDuration="9.876597743s" podCreationTimestamp="2025-09-30 19:48:56 +0000 UTC" firstStartedPulling="2025-09-30 19:48:56.928810369 +0000 UTC m=+1066.549743846" lastFinishedPulling="2025-09-30 19:49:05.30954712 +0000 UTC m=+1074.930480597" observedRunningTime="2025-09-30 19:49:05.868625365 +0000 UTC m=+1075.489558842" watchObservedRunningTime="2025-09-30 19:49:05.876597743 +0000 UTC m=+1075.497531230"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.900331 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"]
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.900953 4756 scope.go:117] "RemoveContainer" containerID="f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.905730 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qx9p6"]
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.939593 4756 scope.go:117] "RemoveContainer" containerID="fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"
Sep 30 19:49:05 crc kubenswrapper[4756]: E0930 19:49:05.940115 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e\": container with ID starting with fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e not found: ID does not exist" containerID="fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.940160 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e"} err="failed to get container status \"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e\": rpc error: code = NotFound desc = could not find container \"fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e\": container with ID starting with fa7bfdcacf7cd3c19a653dd3e51e41da00b1f006f28417003c6c25944d61f09e not found: ID does not exist"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.940187 4756 scope.go:117] "RemoveContainer" containerID="f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad"
Sep 30 19:49:05 crc kubenswrapper[4756]: E0930 19:49:05.940708 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad\": container with ID starting with f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad not found: ID does not exist" containerID="f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.940832 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad"} err="failed to get container status \"f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad\": rpc error: code = NotFound desc = could not find container \"f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad\": container with ID starting with f8b0a4a99826cf0bce24b22201a6ce90e6e4821c5244f6cb16b5bdc12531cbad not found: ID does not exist"
Sep 30 19:49:05 crc kubenswrapper[4756]: I0930 19:49:05.943246 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-qf8st"]
Sep 30 19:49:05 crc kubenswrapper[4756]: W0930 19:49:05.946319 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod988b9bea_09e1_48d3_9f07_5b7a8099f23f.slice/crio-a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce WatchSource:0}: Error finding container a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce: Status 404 returned error can't find the container with id a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce
Sep 30 19:49:06 crc kubenswrapper[4756]: I0930 19:49:06.210931 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0"
Sep 30 19:49:06 crc kubenswrapper[4756]: E0930 19:49:06.211156 4756 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 19:49:06 crc kubenswrapper[4756]: E0930 19:49:06.211197 4756 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 19:49:06 crc kubenswrapper[4756]: E0930 19:49:06.211300 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift podName:26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.21126913 +0000 UTC m=+1077.832202647 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift") pod "swift-storage-0" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f") : configmap "swift-ring-files" not found
Sep 30 19:49:06 crc kubenswrapper[4756]: I0930 19:49:06.872579 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-qf8st" event={"ID":"988b9bea-09e1-48d3-9f07-5b7a8099f23f","Type":"ContainerStarted","Data":"a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce"}
Sep 30 19:49:06 crc kubenswrapper[4756]: I0930 19:49:06.880516 4756 generic.go:334] "Generic (PLEG): container finished" podID="305c7f11-e862-489d-ba96-2426b722652c" containerID="fdc1d64f6bda7bb66eee9974fda6ebe1033da22e5e912bb54c3187aaa145b809" exitCode=0
Sep 30 19:49:06 crc kubenswrapper[4756]: I0930 19:49:06.880613 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nzmxq" event={"ID":"305c7f11-e862-489d-ba96-2426b722652c","Type":"ContainerDied","Data":"fdc1d64f6bda7bb66eee9974fda6ebe1033da22e5e912bb54c3187aaa145b809"}
Sep 30 19:49:06 crc kubenswrapper[4756]: I0930 19:49:06.880646 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nzmxq" event={"ID":"305c7f11-e862-489d-ba96-2426b722652c","Type":"ContainerStarted","Data":"e15870dd12868d01320e27b3e586f284d7b81cb8c4e8212330928971ab1fdf84"}
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.137247 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" path="/var/lib/kubelet/pods/0bc4b3cb-5f9f-4af2-8204-243ea2bd899d/volumes"
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.137815 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-qw4hr"]
Sep 30 19:49:07 crc kubenswrapper[4756]: E0930 19:49:07.138070 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="init"
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.138083 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="init"
Sep 30 19:49:07 crc kubenswrapper[4756]: E0930 19:49:07.138102 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="dnsmasq-dns"
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.138108 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="dnsmasq-dns"
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.138280 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bc4b3cb-5f9f-4af2-8204-243ea2bd899d" containerName="dnsmasq-dns"
Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.138770 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qw4hr"]
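[Editor's aside] swift-storage-0 is blocked only on its projected etc-swift volume: the "swift-ring-files" ConfigMap it projects has not been published yet (the swift-ring-rebalance-qf8st job that produces it only started around 19:49:06), so kubelet simply keeps retrying the mount until the ConfigMap exists. A hypothetical client-side wait for the same condition, using client-go (the API calls are real; the namespace and name are taken from the log, while the program itself and its timeouts are illustrative):

    package main

    import (
            "context"
            "fmt"
            "time"

            metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
            "k8s.io/apimachinery/pkg/util/wait"
            "k8s.io/client-go/kubernetes"
            "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
            cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
            if err != nil {
                    panic(err)
            }
            cs, err := kubernetes.NewForConfig(cfg)
            if err != nil {
                    panic(err)
            }
            // Poll until the ConfigMap the projected volume needs exists,
            // mirroring kubelet's retry-until-present behaviour seen above.
            err = wait.PollUntilContextTimeout(context.Background(), 2*time.Second, 5*time.Minute, true,
                    func(ctx context.Context) (bool, error) {
                            _, err := cs.CoreV1().ConfigMaps("openstack").Get(ctx, "swift-ring-files", metav1.GetOptions{})
                            if err != nil {
                                    return false, nil // absent: keep waiting
                            }
                            return true, nil
                    })
            if err != nil {
                    panic(err)
            }
            fmt.Println("swift-ring-files present; the etc-swift mount can now succeed")
    }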
Need to start a new one" pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.233669 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q292n\" (UniqueName: \"kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n\") pod \"glance-db-create-qw4hr\" (UID: \"4701f9b9-b166-4254-9cd3-73625df60ce0\") " pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.335035 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q292n\" (UniqueName: \"kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n\") pod \"glance-db-create-qw4hr\" (UID: \"4701f9b9-b166-4254-9cd3-73625df60ce0\") " pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.361421 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q292n\" (UniqueName: \"kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n\") pod \"glance-db-create-qw4hr\" (UID: \"4701f9b9-b166-4254-9cd3-73625df60ce0\") " pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.455476 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.888768 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nzmxq" event={"ID":"305c7f11-e862-489d-ba96-2426b722652c","Type":"ContainerStarted","Data":"70b065679a123a719144084abb75b201a20b68a2f9edce0ba39bdc3f95c7a5de"} Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.888881 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-nzmxq" Sep 30 19:49:07 crc kubenswrapper[4756]: I0930 19:49:07.909998 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-nzmxq" podStartSLOduration=4.909983035 podStartE2EDuration="4.909983035s" podCreationTimestamp="2025-09-30 19:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.906293309 +0000 UTC m=+1077.527226786" watchObservedRunningTime="2025-09-30 19:49:07.909983035 +0000 UTC m=+1077.530916512" Sep 30 19:49:08 crc kubenswrapper[4756]: I0930 19:49:08.248783 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:08 crc kubenswrapper[4756]: E0930 19:49:08.248947 4756 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 19:49:08 crc kubenswrapper[4756]: E0930 19:49:08.248984 4756 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 19:49:08 crc kubenswrapper[4756]: E0930 19:49:08.249046 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift podName:26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:12.249029358 +0000 UTC m=+1081.869962835 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift") pod "swift-storage-0" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f") : configmap "swift-ring-files" not found Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.273892 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qw4hr"] Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.919969 4756 generic.go:334] "Generic (PLEG): container finished" podID="4701f9b9-b166-4254-9cd3-73625df60ce0" containerID="e64c4560859d931a12d9625b07d841a4db85b1016b5f67c49bd7fe3814abe86a" exitCode=0 Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.920038 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qw4hr" event={"ID":"4701f9b9-b166-4254-9cd3-73625df60ce0","Type":"ContainerDied","Data":"e64c4560859d931a12d9625b07d841a4db85b1016b5f67c49bd7fe3814abe86a"} Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.920064 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qw4hr" event={"ID":"4701f9b9-b166-4254-9cd3-73625df60ce0","Type":"ContainerStarted","Data":"28e731f1a28a5ebaefc4e947ef28a2a6d749be87ebc4ac18096b8a25ee1226d0"} Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.922768 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-qf8st" event={"ID":"988b9bea-09e1-48d3-9f07-5b7a8099f23f","Type":"ContainerStarted","Data":"b9039dde4f47a71c71085a8872d287b5cca916163354551bfbc89d93d31530ff"} Sep 30 19:49:09 crc kubenswrapper[4756]: I0930 19:49:09.954932 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-qf8st" podStartSLOduration=2.001901327 podStartE2EDuration="4.954912379s" podCreationTimestamp="2025-09-30 19:49:05 +0000 UTC" firstStartedPulling="2025-09-30 19:49:05.949136819 +0000 UTC m=+1075.570070296" lastFinishedPulling="2025-09-30 19:49:08.902147871 +0000 UTC m=+1078.523081348" observedRunningTime="2025-09-30 19:49:09.951277464 +0000 UTC m=+1079.572210951" watchObservedRunningTime="2025-09-30 19:49:09.954912379 +0000 UTC m=+1079.575845856" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.358764 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.373497 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-27gdt"] Sep 30 19:49:11 crc kubenswrapper[4756]: E0930 19:49:11.373947 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4701f9b9-b166-4254-9cd3-73625df60ce0" containerName="mariadb-database-create" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.373969 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4701f9b9-b166-4254-9cd3-73625df60ce0" containerName="mariadb-database-create" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.374164 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4701f9b9-b166-4254-9cd3-73625df60ce0" containerName="mariadb-database-create" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.374878 4756 util.go:30] "No sandbox for pod can be found. 
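
The "No retries permitted until ... (durationBeforeRetry 4s)" line is kubelet's per-volume exponential backoff: each consecutive SetUp failure roughly doubles the wait before the next attempt (the same etc-swift volume is retried with an 8s delay further down). A sketch of that observable doubling; the initial delay and the cap are assumptions here, not values taken from the log:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond                // assumed initial backoff
        const maxDelay = 2*time.Minute + 2*time.Second // assumed cap
        for attempt := 1; attempt <= 8; attempt++ {
            fmt.Printf("attempt %d failed, durationBeforeRetry %v\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

With the 4s and then 8s delays, the next retry is scheduled for 19:49:20, which is exactly when the mount finally succeeds below.
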
Need to start a new one" pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.383658 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-27gdt"] Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.525052 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q292n\" (UniqueName: \"kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n\") pod \"4701f9b9-b166-4254-9cd3-73625df60ce0\" (UID: \"4701f9b9-b166-4254-9cd3-73625df60ce0\") " Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.525592 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwkwj\" (UniqueName: \"kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj\") pod \"keystone-db-create-27gdt\" (UID: \"f3bb8b1f-f343-4c14-a5da-70dc960397db\") " pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.532581 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n" (OuterVolumeSpecName: "kube-api-access-q292n") pod "4701f9b9-b166-4254-9cd3-73625df60ce0" (UID: "4701f9b9-b166-4254-9cd3-73625df60ce0"). InnerVolumeSpecName "kube-api-access-q292n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.538856 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.538912 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.627783 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwkwj\" (UniqueName: \"kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj\") pod \"keystone-db-create-27gdt\" (UID: \"f3bb8b1f-f343-4c14-a5da-70dc960397db\") " pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.627991 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q292n\" (UniqueName: \"kubernetes.io/projected/4701f9b9-b166-4254-9cd3-73625df60ce0-kube-api-access-q292n\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.649568 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwkwj\" (UniqueName: \"kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj\") pod \"keystone-db-create-27gdt\" (UID: \"f3bb8b1f-f343-4c14-a5da-70dc960397db\") " pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.698913 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.699506 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-wvmsq"] Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.701097 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.706484 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-wvmsq"] Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.833684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xm8v\" (UniqueName: \"kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v\") pod \"placement-db-create-wvmsq\" (UID: \"bbdacb5e-ec79-4c63-b745-cae6af7434d6\") " pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.934651 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xm8v\" (UniqueName: \"kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v\") pod \"placement-db-create-wvmsq\" (UID: \"bbdacb5e-ec79-4c63-b745-cae6af7434d6\") " pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.941984 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qw4hr" event={"ID":"4701f9b9-b166-4254-9cd3-73625df60ce0","Type":"ContainerDied","Data":"28e731f1a28a5ebaefc4e947ef28a2a6d749be87ebc4ac18096b8a25ee1226d0"} Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.942027 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28e731f1a28a5ebaefc4e947ef28a2a6d749be87ebc4ac18096b8a25ee1226d0" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.942039 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qw4hr" Sep 30 19:49:11 crc kubenswrapper[4756]: I0930 19:49:11.950694 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xm8v\" (UniqueName: \"kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v\") pod \"placement-db-create-wvmsq\" (UID: \"bbdacb5e-ec79-4c63-b745-cae6af7434d6\") " pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.029499 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.122970 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-27gdt"] Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.340364 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:12 crc kubenswrapper[4756]: E0930 19:49:12.340514 4756 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 19:49:12 crc kubenswrapper[4756]: E0930 19:49:12.340536 4756 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 19:49:12 crc kubenswrapper[4756]: E0930 19:49:12.340602 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift podName:26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f nodeName:}" failed. No retries permitted until 2025-09-30 19:49:20.340583381 +0000 UTC m=+1089.961516858 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift") pod "swift-storage-0" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f") : configmap "swift-ring-files" not found Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.526353 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-wvmsq"] Sep 30 19:49:12 crc kubenswrapper[4756]: W0930 19:49:12.529224 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbdacb5e_ec79_4c63_b745_cae6af7434d6.slice/crio-8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c WatchSource:0}: Error finding container 8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c: Status 404 returned error can't find the container with id 8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.952899 4756 generic.go:334] "Generic (PLEG): container finished" podID="bbdacb5e-ec79-4c63-b745-cae6af7434d6" containerID="fb299811d5b1ee6b8b6e9633ad9ea2340ebae147f0ca313c0124b1d085c0b499" exitCode=0 Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.953029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-wvmsq" event={"ID":"bbdacb5e-ec79-4c63-b745-cae6af7434d6","Type":"ContainerDied","Data":"fb299811d5b1ee6b8b6e9633ad9ea2340ebae147f0ca313c0124b1d085c0b499"} Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.953194 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-wvmsq" event={"ID":"bbdacb5e-ec79-4c63-b745-cae6af7434d6","Type":"ContainerStarted","Data":"8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c"} Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.954666 4756 generic.go:334] "Generic (PLEG): container finished" podID="f3bb8b1f-f343-4c14-a5da-70dc960397db" containerID="ae0fbf9992f7682bada4b00715597be672fc0de41bb3330f05ac33f88569fad7" exitCode=0 Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.954713 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-create-27gdt" event={"ID":"f3bb8b1f-f343-4c14-a5da-70dc960397db","Type":"ContainerDied","Data":"ae0fbf9992f7682bada4b00715597be672fc0de41bb3330f05ac33f88569fad7"} Sep 30 19:49:12 crc kubenswrapper[4756]: I0930 19:49:12.954736 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-27gdt" event={"ID":"f3bb8b1f-f343-4c14-a5da-70dc960397db","Type":"ContainerStarted","Data":"2bbc569e59c09b39de4e409d8a396ff54b88413dc2dbeaf781f6be5b5b69863f"} Sep 30 19:49:13 crc kubenswrapper[4756]: I0930 19:49:13.679759 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-nzmxq" Sep 30 19:49:13 crc kubenswrapper[4756]: I0930 19:49:13.758733 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:49:13 crc kubenswrapper[4756]: I0930 19:49:13.759001 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="dnsmasq-dns" containerID="cri-o://60bfb6c548623904a95282fcfec56d46ecba4c66bb2fa6b0ef8f81c3ba789b54" gracePeriod=10 Sep 30 19:49:13 crc kubenswrapper[4756]: I0930 19:49:13.968990 4756 generic.go:334] "Generic (PLEG): container finished" podID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerID="60bfb6c548623904a95282fcfec56d46ecba4c66bb2fa6b0ef8f81c3ba789b54" exitCode=0 Sep 30 19:49:13 crc kubenswrapper[4756]: I0930 19:49:13.969106 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" event={"ID":"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e","Type":"ContainerDied","Data":"60bfb6c548623904a95282fcfec56d46ecba4c66bb2fa6b0ef8f81c3ba789b54"} Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.212888 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.345163 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.355688 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.380444 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config\") pod \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.380492 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb\") pod \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.380534 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc\") pod \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.380674 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r99k4\" (UniqueName: \"kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4\") pod \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.380731 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb\") pod \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\" (UID: \"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.386010 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4" (OuterVolumeSpecName: "kube-api-access-r99k4") pod "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" (UID: "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e"). InnerVolumeSpecName "kube-api-access-r99k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.419113 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" (UID: "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.419818 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" (UID: "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.425022 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" (UID: "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.426022 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config" (OuterVolumeSpecName: "config") pod "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" (UID: "9db1a87d-2ea3-4ec9-bf4d-a72708411f6e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.482323 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwkwj\" (UniqueName: \"kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj\") pod \"f3bb8b1f-f343-4c14-a5da-70dc960397db\" (UID: \"f3bb8b1f-f343-4c14-a5da-70dc960397db\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.482443 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xm8v\" (UniqueName: \"kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v\") pod \"bbdacb5e-ec79-4c63-b745-cae6af7434d6\" (UID: \"bbdacb5e-ec79-4c63-b745-cae6af7434d6\") " Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.483072 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.483100 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.483161 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.483181 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r99k4\" (UniqueName: \"kubernetes.io/projected/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-kube-api-access-r99k4\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.483198 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.485177 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj" (OuterVolumeSpecName: "kube-api-access-gwkwj") pod "f3bb8b1f-f343-4c14-a5da-70dc960397db" (UID: "f3bb8b1f-f343-4c14-a5da-70dc960397db"). InnerVolumeSpecName "kube-api-access-gwkwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.485802 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v" (OuterVolumeSpecName: "kube-api-access-7xm8v") pod "bbdacb5e-ec79-4c63-b745-cae6af7434d6" (UID: "bbdacb5e-ec79-4c63-b745-cae6af7434d6"). InnerVolumeSpecName "kube-api-access-7xm8v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.585143 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwkwj\" (UniqueName: \"kubernetes.io/projected/f3bb8b1f-f343-4c14-a5da-70dc960397db-kube-api-access-gwkwj\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.585192 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xm8v\" (UniqueName: \"kubernetes.io/projected/bbdacb5e-ec79-4c63-b745-cae6af7434d6-kube-api-access-7xm8v\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.977919 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-wvmsq" event={"ID":"bbdacb5e-ec79-4c63-b745-cae6af7434d6","Type":"ContainerDied","Data":"8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c"} Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.978152 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bb178eb4dca784a0848947674f1cb5feb97a8ee0851bf22ffea0cf4fcda553c" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.977989 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-wvmsq" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.980354 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-27gdt" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.980343 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-27gdt" event={"ID":"f3bb8b1f-f343-4c14-a5da-70dc960397db","Type":"ContainerDied","Data":"2bbc569e59c09b39de4e409d8a396ff54b88413dc2dbeaf781f6be5b5b69863f"} Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.980538 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bbc569e59c09b39de4e409d8a396ff54b88413dc2dbeaf781f6be5b5b69863f" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.983708 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" event={"ID":"9db1a87d-2ea3-4ec9-bf4d-a72708411f6e","Type":"ContainerDied","Data":"84d8d8410e09fe3246e3d5d3ce05c109ca763ad755e5148c2075b4bc78b42ece"} Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.983738 4756 scope.go:117] "RemoveContainer" containerID="60bfb6c548623904a95282fcfec56d46ecba4c66bb2fa6b0ef8f81c3ba789b54" Sep 30 19:49:14 crc kubenswrapper[4756]: I0930 19:49:14.983864 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wd9cd" Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.014681 4756 scope.go:117] "RemoveContainer" containerID="19075cdc31fd062688f54d204c9a29469ec5be5e49780dfca17853ecef0396aa" Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.043722 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.055548 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wd9cd"] Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.125778 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" path="/var/lib/kubelet/pods/9db1a87d-2ea3-4ec9-bf4d-a72708411f6e/volumes" Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.994670 4756 generic.go:334] "Generic (PLEG): container finished" podID="988b9bea-09e1-48d3-9f07-5b7a8099f23f" containerID="b9039dde4f47a71c71085a8872d287b5cca916163354551bfbc89d93d31530ff" exitCode=0 Sep 30 19:49:15 crc kubenswrapper[4756]: I0930 19:49:15.994749 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-qf8st" event={"ID":"988b9bea-09e1-48d3-9f07-5b7a8099f23f","Type":"ContainerDied","Data":"b9039dde4f47a71c71085a8872d287b5cca916163354551bfbc89d93d31530ff"} Sep 30 19:49:16 crc kubenswrapper[4756]: I0930 19:49:16.566062 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.422605 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-qf8st" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.540708 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.541123 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.541152 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.541197 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.541289 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mscbg\" (UniqueName: \"kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 
19:49:17.541335 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.541413 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle\") pod \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\" (UID: \"988b9bea-09e1-48d3-9f07-5b7a8099f23f\") " Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.542297 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.543083 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.547984 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg" (OuterVolumeSpecName: "kube-api-access-mscbg") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "kube-api-access-mscbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.551344 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.561260 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts" (OuterVolumeSpecName: "scripts") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.568966 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.569839 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "988b9bea-09e1-48d3-9f07-5b7a8099f23f" (UID: "988b9bea-09e1-48d3-9f07-5b7a8099f23f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643557 4756 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643600 4756 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643613 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/988b9bea-09e1-48d3-9f07-5b7a8099f23f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643625 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mscbg\" (UniqueName: \"kubernetes.io/projected/988b9bea-09e1-48d3-9f07-5b7a8099f23f-kube-api-access-mscbg\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643641 4756 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643655 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b9bea-09e1-48d3-9f07-5b7a8099f23f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:17 crc kubenswrapper[4756]: I0930 19:49:17.643671 4756 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/988b9bea-09e1-48d3-9f07-5b7a8099f23f-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:18 crc kubenswrapper[4756]: I0930 19:49:18.026252 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-qf8st" event={"ID":"988b9bea-09e1-48d3-9f07-5b7a8099f23f","Type":"ContainerDied","Data":"a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce"} Sep 30 19:49:18 crc kubenswrapper[4756]: I0930 19:49:18.026288 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a230bd7ee3780af9d2aa2073255fca917890e6f68d0526ac3ccf152a4b6a91ce" Sep 30 19:49:18 crc kubenswrapper[4756]: I0930 19:49:18.026341 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-qf8st" Sep 30 19:49:20 crc kubenswrapper[4756]: I0930 19:49:20.394323 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:20 crc kubenswrapper[4756]: I0930 19:49:20.403196 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"swift-storage-0\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " pod="openstack/swift-storage-0" Sep 30 19:49:20 crc kubenswrapper[4756]: I0930 19:49:20.476768 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.010486 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 19:49:21 crc kubenswrapper[4756]: W0930 19:49:21.023761 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26be10f5_be18_4ab5_9f8e_0d6f2a8dcb4f.slice/crio-8aa879b20f8157c2cc1b3bc2fc2c825aee61fad6e16fc83bcb4862947fe2fcce WatchSource:0}: Error finding container 8aa879b20f8157c2cc1b3bc2fc2c825aee61fad6e16fc83bcb4862947fe2fcce: Status 404 returned error can't find the container with id 8aa879b20f8157c2cc1b3bc2fc2c825aee61fad6e16fc83bcb4862947fe2fcce Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.053604 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"8aa879b20f8157c2cc1b3bc2fc2c825aee61fad6e16fc83bcb4862947fe2fcce"} Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.490607 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b613-account-create-zjxl7"] Sep 30 19:49:21 crc kubenswrapper[4756]: E0930 19:49:21.490912 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbdacb5e-ec79-4c63-b745-cae6af7434d6" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.490924 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbdacb5e-ec79-4c63-b745-cae6af7434d6" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: E0930 19:49:21.490941 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="init" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.490947 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="init" Sep 30 19:49:21 crc kubenswrapper[4756]: E0930 19:49:21.490959 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="dnsmasq-dns" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.490965 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="dnsmasq-dns" Sep 30 19:49:21 crc kubenswrapper[4756]: E0930 19:49:21.490987 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bb8b1f-f343-4c14-a5da-70dc960397db" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.490997 
4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bb8b1f-f343-4c14-a5da-70dc960397db" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: E0930 19:49:21.491014 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="988b9bea-09e1-48d3-9f07-5b7a8099f23f" containerName="swift-ring-rebalance" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491021 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="988b9bea-09e1-48d3-9f07-5b7a8099f23f" containerName="swift-ring-rebalance" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491157 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="988b9bea-09e1-48d3-9f07-5b7a8099f23f" containerName="swift-ring-rebalance" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491170 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbdacb5e-ec79-4c63-b745-cae6af7434d6" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491183 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3bb8b1f-f343-4c14-a5da-70dc960397db" containerName="mariadb-database-create" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491192 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db1a87d-2ea3-4ec9-bf4d-a72708411f6e" containerName="dnsmasq-dns" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.491711 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.493826 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.524425 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b613-account-create-zjxl7"] Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.617914 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwmzd\" (UniqueName: \"kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd\") pod \"keystone-b613-account-create-zjxl7\" (UID: \"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa\") " pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.718959 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwmzd\" (UniqueName: \"kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd\") pod \"keystone-b613-account-create-zjxl7\" (UID: \"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa\") " pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.760579 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwmzd\" (UniqueName: \"kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd\") pod \"keystone-b613-account-create-zjxl7\" (UID: \"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa\") " pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.800253 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a4e2-account-create-q5qj9"] Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.801580 4756 util.go:30] "No sandbox for pod can be found. 
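
The RemoveStaleState / "Deleted CPUSet assignment" burst above is admission-time housekeeping in the CPU and memory managers: before placing a new pod, assignments still recorded for containers of pods that no longer exist are dropped so their resources can be reused. A sketch of that bookkeeping with invented types (kubelet keeps the real state in cpu_manager.go / state_mem.go):

    package main

    import "fmt"

    // key mirrors how the log addresses assignments: podUID plus containerName.
    type key struct{ podUID, container string }

    func main() {
        cpuSets := map[key]string{
            {"988b9bea-09e1-48d3-9f07-5b7a8099f23f", "swift-ring-rebalance"}: "0-1", // hypothetical cpuset
        }
        active := map[string]bool{} // the rebalance pod has been deleted
        for k := range cpuSets {
            if !active[k.podUID] {
                delete(cpuSets, k)
                fmt.Printf("RemoveStaleState: removing container %s/%s\n", k.podUID, k.container)
            }
        }
    }
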
Need to start a new one" pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.804020 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.806856 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a4e2-account-create-q5qj9"] Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.836908 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:21 crc kubenswrapper[4756]: I0930 19:49:21.922119 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnvdl\" (UniqueName: \"kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl\") pod \"placement-a4e2-account-create-q5qj9\" (UID: \"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1\") " pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.023563 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnvdl\" (UniqueName: \"kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl\") pod \"placement-a4e2-account-create-q5qj9\" (UID: \"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1\") " pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.050020 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnvdl\" (UniqueName: \"kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl\") pod \"placement-a4e2-account-create-q5qj9\" (UID: \"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1\") " pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.068288 4756 generic.go:334] "Generic (PLEG): container finished" podID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerID="3b361be86fa33c2d42005a3a29c1306df75cc2f04957244da9accd2d1ab215f9" exitCode=0 Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.068362 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerDied","Data":"3b361be86fa33c2d42005a3a29c1306df75cc2f04957244da9accd2d1ab215f9"} Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.070890 4756 generic.go:334] "Generic (PLEG): container finished" podID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerID="d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60" exitCode=0 Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.070927 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerDied","Data":"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60"} Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.120865 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.408980 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b613-account-create-zjxl7"] Sep 30 19:49:22 crc kubenswrapper[4756]: W0930 19:49:22.408971 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07c7b7f8_1c4a_4e3b_907b_662dc0c798aa.slice/crio-fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792 WatchSource:0}: Error finding container fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792: Status 404 returned error can't find the container with id fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792 Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.559314 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kfdjm" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller" probeResult="failure" output=< Sep 30 19:49:22 crc kubenswrapper[4756]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 19:49:22 crc kubenswrapper[4756]: > Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.572615 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a4e2-account-create-q5qj9"] Sep 30 19:49:22 crc kubenswrapper[4756]: W0930 19:49:22.581650 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda84a6bd0_737d_4dc9_a94f_f6e449c51bd1.slice/crio-8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4 WatchSource:0}: Error finding container 8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4: Status 404 returned error can't find the container with id 8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4 Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.594913 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.629603 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.841637 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kfdjm-config-hjw5w"] Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.843248 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.845602 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.850912 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm-config-hjw5w"] Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.940798 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.940849 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4qc6\" (UniqueName: \"kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.941092 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.941174 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.941415 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:22 crc kubenswrapper[4756]: I0930 19:49:22.941466 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043088 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043159 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn\") pod 
\"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043232 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043259 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043313 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043347 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4qc6\" (UniqueName: \"kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043475 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043571 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043572 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.043968 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.045017 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts\") pod 
\"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.060659 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4qc6\" (UniqueName: \"kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6\") pod \"ovn-controller-kfdjm-config-hjw5w\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.080926 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"4b05a265d73c3a7ceb454829ebf559c526823bafbc3ff9d5547722704959942d"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.080976 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"28e0221dbc10ee875c0d1d749687dc2243eba9bd6154bd2190340276bba9e102"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.080987 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"e54877dad5935cd8933ec14fc3b8e4b0d87beeda887453f4702efc3c346c99e8"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.080996 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"2ded9539aabf2d7ed41a0b6709054bd69836dd45212e96330a8ced4a80d659c3"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.082284 4756 generic.go:334] "Generic (PLEG): container finished" podID="a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" containerID="0c0236998df6869414ba3511af432b72b556619301b8a1f321a61e81a804042b" exitCode=0 Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.082335 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a4e2-account-create-q5qj9" event={"ID":"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1","Type":"ContainerDied","Data":"0c0236998df6869414ba3511af432b72b556619301b8a1f321a61e81a804042b"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.082355 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a4e2-account-create-q5qj9" event={"ID":"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1","Type":"ContainerStarted","Data":"8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.083779 4756 generic.go:334] "Generic (PLEG): container finished" podID="07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" containerID="4adae0e79776f455907198b99b57c0679e094944b65745853490c007c0b51563" exitCode=0 Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.083827 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b613-account-create-zjxl7" event={"ID":"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa","Type":"ContainerDied","Data":"4adae0e79776f455907198b99b57c0679e094944b65745853490c007c0b51563"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.083847 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b613-account-create-zjxl7" event={"ID":"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa","Type":"ContainerStarted","Data":"fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792"} Sep 30 
19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.085426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerStarted","Data":"22acee4e4b450fbce8e6aa4569d2260f650fed76f5340a9472eecc6bc744543f"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.086201 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.089050 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerStarted","Data":"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa"} Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.089370 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.130126 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=48.058879636 podStartE2EDuration="56.130107978s" podCreationTimestamp="2025-09-30 19:48:27 +0000 UTC" firstStartedPulling="2025-09-30 19:48:39.437018384 +0000 UTC m=+1049.057951861" lastFinishedPulling="2025-09-30 19:48:47.508246716 +0000 UTC m=+1057.129180203" observedRunningTime="2025-09-30 19:49:23.128040894 +0000 UTC m=+1092.748974371" watchObservedRunningTime="2025-09-30 19:49:23.130107978 +0000 UTC m=+1092.751041455" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.155117 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=48.490850459 podStartE2EDuration="56.155097722s" podCreationTimestamp="2025-09-30 19:48:27 +0000 UTC" firstStartedPulling="2025-09-30 19:48:39.396596437 +0000 UTC m=+1049.017529914" lastFinishedPulling="2025-09-30 19:48:47.06084366 +0000 UTC m=+1056.681777177" observedRunningTime="2025-09-30 19:49:23.150807849 +0000 UTC m=+1092.771741326" watchObservedRunningTime="2025-09-30 19:49:23.155097722 +0000 UTC m=+1092.776031199" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.168571 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:23 crc kubenswrapper[4756]: I0930 19:49:23.621842 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm-config-hjw5w"] Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.105171 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-hjw5w" event={"ID":"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e","Type":"ContainerStarted","Data":"001b6cc738cccabf6dc34eb0d26d436e3bdcb9eaae466fb6ce0c2a4a698820c7"} Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.105427 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-hjw5w" event={"ID":"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e","Type":"ContainerStarted","Data":"1b065f2a02396bdbb7dd9c0f35c074988c598b4a97aafd4538c47b04804c6c92"} Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.485522 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.492942 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.570733 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwmzd\" (UniqueName: \"kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd\") pod \"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa\" (UID: \"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa\") " Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.570891 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnvdl\" (UniqueName: \"kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl\") pod \"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1\" (UID: \"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1\") " Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.576540 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd" (OuterVolumeSpecName: "kube-api-access-zwmzd") pod "07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" (UID: "07c7b7f8-1c4a-4e3b-907b-662dc0c798aa"). InnerVolumeSpecName "kube-api-access-zwmzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.577930 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl" (OuterVolumeSpecName: "kube-api-access-rnvdl") pod "a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" (UID: "a84a6bd0-737d-4dc9-a94f-f6e449c51bd1"). InnerVolumeSpecName "kube-api-access-rnvdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.672217 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwmzd\" (UniqueName: \"kubernetes.io/projected/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa-kube-api-access-zwmzd\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:24 crc kubenswrapper[4756]: I0930 19:49:24.672585 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnvdl\" (UniqueName: \"kubernetes.io/projected/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1-kube-api-access-rnvdl\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.144789 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"d845f4ce57e99f7f80a72593e5611ab764740d01bc79f238f2095047d2ba9939"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.145376 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"c1034fd69491ceca2b5a5e3609ed94e95183c5cd8752878c0eb7d55136e8ac9b"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.145431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"9d88cf8ad00002797476e74b41a910b1b6ec952d10c00335bb0abfc6bd17fddd"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.145446 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"db446a0e9626857a987217e5f70793bfbb0795d88490d45a933892f52b588044"} Sep 30 19:49:25 crc kubenswrapper[4756]: 
I0930 19:49:25.145673 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a4e2-account-create-q5qj9" event={"ID":"a84a6bd0-737d-4dc9-a94f-f6e449c51bd1","Type":"ContainerDied","Data":"8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.145705 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a4e2-account-create-q5qj9" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.145763 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b906afa01026feb1cef28a22fdf090d5078b6f20ecc9516783c226e98cfbdb4" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.148531 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b613-account-create-zjxl7" event={"ID":"07c7b7f8-1c4a-4e3b-907b-662dc0c798aa","Type":"ContainerDied","Data":"fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.148758 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc15257fe405b91213c865e00c90415eab024ac59513cf022893e9285ee86792" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.149180 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b613-account-create-zjxl7" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.150419 4756 generic.go:334] "Generic (PLEG): container finished" podID="0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" containerID="001b6cc738cccabf6dc34eb0d26d436e3bdcb9eaae466fb6ce0c2a4a698820c7" exitCode=0 Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.150449 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-hjw5w" event={"ID":"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e","Type":"ContainerDied","Data":"001b6cc738cccabf6dc34eb0d26d436e3bdcb9eaae466fb6ce0c2a4a698820c7"} Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.535384 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693378 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693699 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693797 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4qc6\" (UniqueName: \"kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693823 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693850 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693469 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run" (OuterVolumeSpecName: "var-run") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693810 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.693934 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts\") pod \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\" (UID: \"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e\") " Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.694021 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.694630 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.694818 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts" (OuterVolumeSpecName: "scripts") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698369 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6" (OuterVolumeSpecName: "kube-api-access-j4qc6") pod "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" (UID: "0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e"). InnerVolumeSpecName "kube-api-access-j4qc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698519 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698545 4756 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698556 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698568 4756 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.698577 4756 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:25 crc kubenswrapper[4756]: I0930 19:49:25.799614 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4qc6\" (UniqueName: \"kubernetes.io/projected/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e-kube-api-access-j4qc6\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.163549 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"c9e0fb72308640c83674dc311f125ea56a518ff2b747a1f3595f7eb4f02f32bd"} Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.163601 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"9a23a18c16fa569d73aa74575f47acbf5abd74569f207256e830e5ae1207a4c1"} Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.165603 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-hjw5w" event={"ID":"0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e","Type":"ContainerDied","Data":"1b065f2a02396bdbb7dd9c0f35c074988c598b4a97aafd4538c47b04804c6c92"} Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.165634 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b065f2a02396bdbb7dd9c0f35c074988c598b4a97aafd4538c47b04804c6c92" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.165691 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-hjw5w" Sep 30 19:49:26 crc kubenswrapper[4756]: E0930 19:49:26.372719 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa5dfa0_2d24_4e1e_a507_b1d0ca19f68e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa5dfa0_2d24_4e1e_a507_b1d0ca19f68e.slice/crio-1b065f2a02396bdbb7dd9c0f35c074988c598b4a97aafd4538c47b04804c6c92\": RecentStats: unable to find data in memory cache]" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.653730 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kfdjm-config-hjw5w"] Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.667372 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kfdjm-config-hjw5w"] Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.763594 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kfdjm-config-c68wb"] Sep 30 19:49:26 crc kubenswrapper[4756]: E0930 19:49:26.763929 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.763941 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: E0930 19:49:26.763962 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.763968 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: E0930 19:49:26.763979 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" containerName="ovn-config" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.763984 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" containerName="ovn-config" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.764123 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" containerName="ovn-config" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.764145 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" 
containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.764157 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" containerName="mariadb-account-create" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.764673 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.767583 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.775645 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm-config-c68wb"] Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914502 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914570 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914639 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914731 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914838 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:26 crc kubenswrapper[4756]: I0930 19:49:26.914965 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5rfl\" (UniqueName: \"kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016596 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5rfl\" (UniqueName: \"kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl\") pod 
\"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016652 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016693 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016740 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016763 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.016793 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.017037 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.017084 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.017122 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.017660 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: 
\"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.018812 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.040714 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5rfl\" (UniqueName: \"kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl\") pod \"ovn-controller-kfdjm-config-c68wb\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.086248 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.128152 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e" path="/var/lib/kubelet/pods/0fa5dfa0-2d24-4e1e-a507-b1d0ca19f68e/volumes" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.191087 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"920c7a436f02252c0b5d507de8037ccd6743d1c2a255a147a18b894f28b926a4"} Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.191122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"762a83f21b826aa3eb53b3ebd785e06362136c9dadaba330d0843609fc9c2284"} Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.191131 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"51f94be61ce4fe7988183466f20b547fdd3570bb129d81fb1a0d14b57bcbd868"} Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.191139 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"c7c8e729cceb6e35134441b29a4688dd7e5d7409c1ff4e605c6736fd89e5f899"} Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.191148 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerStarted","Data":"5434ba65bcd1a16d1031ac5831c922a2bfb696f3b7f8c608c5e3fbd5cff3290f"} Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.263743 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-f07e-account-create-ng8hh"] Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.286117 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f07e-account-create-ng8hh"] Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.286219 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.294067 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.307157 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=19.483389891 podStartE2EDuration="24.307136735s" podCreationTimestamp="2025-09-30 19:49:03 +0000 UTC" firstStartedPulling="2025-09-30 19:49:21.025310557 +0000 UTC m=+1090.646244034" lastFinishedPulling="2025-09-30 19:49:25.849057401 +0000 UTC m=+1095.469990878" observedRunningTime="2025-09-30 19:49:27.285720316 +0000 UTC m=+1096.906653793" watchObservedRunningTime="2025-09-30 19:49:27.307136735 +0000 UTC m=+1096.928070222" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.430684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt46q\" (UniqueName: \"kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q\") pod \"glance-f07e-account-create-ng8hh\" (UID: \"cdc6b8bd-8038-4943-8861-4b5898f5940a\") " pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.532667 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt46q\" (UniqueName: \"kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q\") pod \"glance-f07e-account-create-ng8hh\" (UID: \"cdc6b8bd-8038-4943-8861-4b5898f5940a\") " pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.551510 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-kfdjm" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.566403 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt46q\" (UniqueName: \"kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q\") pod \"glance-f07e-account-create-ng8hh\" (UID: \"cdc6b8bd-8038-4943-8861-4b5898f5940a\") " pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.576966 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.581624 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.583568 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.587070 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.634566 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.647300 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kfdjm-config-c68wb"] Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736104 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc9bg\" (UniqueName: \"kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736149 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736243 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736461 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.736612 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.838933 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.839315 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.839355 4756 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.839418 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc9bg\" (UniqueName: \"kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.839444 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.839509 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.840304 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.840350 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.847359 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.847371 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.861512 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.870116 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc9bg\" (UniqueName: 
\"kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg\") pod \"dnsmasq-dns-77585f5f8c-j99sb\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:27 crc kubenswrapper[4756]: I0930 19:49:27.919409 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.101281 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f07e-account-create-ng8hh"] Sep 30 19:49:28 crc kubenswrapper[4756]: W0930 19:49:28.128417 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdc6b8bd_8038_4943_8861_4b5898f5940a.slice/crio-3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1 WatchSource:0}: Error finding container 3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1: Status 404 returned error can't find the container with id 3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1 Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.198554 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f07e-account-create-ng8hh" event={"ID":"cdc6b8bd-8038-4943-8861-4b5898f5940a","Type":"ContainerStarted","Data":"3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1"} Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.202227 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-c68wb" event={"ID":"50f70a52-b554-466a-bb2c-e1518df4795c","Type":"ContainerStarted","Data":"c1c4b1ee0f6189aa7da5e82704d143de6665c35c1309434f7d47ace0d8c1106a"} Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.202255 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-c68wb" event={"ID":"50f70a52-b554-466a-bb2c-e1518df4795c","Type":"ContainerStarted","Data":"57c8303d05a94f64f28bda42577d27fbd87d8dae3962d8c99a7057a309bd4b4e"} Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.220433 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-kfdjm-config-c68wb" podStartSLOduration=2.220373097 podStartE2EDuration="2.220373097s" podCreationTimestamp="2025-09-30 19:49:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:28.215051757 +0000 UTC m=+1097.835985234" watchObservedRunningTime="2025-09-30 19:49:28.220373097 +0000 UTC m=+1097.841306584" Sep 30 19:49:28 crc kubenswrapper[4756]: I0930 19:49:28.387439 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:49:28 crc kubenswrapper[4756]: W0930 19:49:28.412373 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd38aabb4_4429_4475_80b6_13babcaef4be.slice/crio-c63dd0b4d029952717074f1ac81ec3f463b9e34a46063293bfcea2d555f99ad1 WatchSource:0}: Error finding container c63dd0b4d029952717074f1ac81ec3f463b9e34a46063293bfcea2d555f99ad1: Status 404 returned error can't find the container with id c63dd0b4d029952717074f1ac81ec3f463b9e34a46063293bfcea2d555f99ad1 Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.208999 4756 generic.go:334] "Generic (PLEG): container finished" podID="d38aabb4-4429-4475-80b6-13babcaef4be" 
containerID="7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a" exitCode=0 Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.209116 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" event={"ID":"d38aabb4-4429-4475-80b6-13babcaef4be","Type":"ContainerDied","Data":"7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a"} Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.209621 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" event={"ID":"d38aabb4-4429-4475-80b6-13babcaef4be","Type":"ContainerStarted","Data":"c63dd0b4d029952717074f1ac81ec3f463b9e34a46063293bfcea2d555f99ad1"} Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.211759 4756 generic.go:334] "Generic (PLEG): container finished" podID="cdc6b8bd-8038-4943-8861-4b5898f5940a" containerID="a678febbf786620062061e9e22724a0f5433ffe878f66576ce828fa62823fb1a" exitCode=0 Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.211847 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f07e-account-create-ng8hh" event={"ID":"cdc6b8bd-8038-4943-8861-4b5898f5940a","Type":"ContainerDied","Data":"a678febbf786620062061e9e22724a0f5433ffe878f66576ce828fa62823fb1a"} Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.217483 4756 generic.go:334] "Generic (PLEG): container finished" podID="50f70a52-b554-466a-bb2c-e1518df4795c" containerID="c1c4b1ee0f6189aa7da5e82704d143de6665c35c1309434f7d47ace0d8c1106a" exitCode=0 Sep 30 19:49:29 crc kubenswrapper[4756]: I0930 19:49:29.217529 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm-config-c68wb" event={"ID":"50f70a52-b554-466a-bb2c-e1518df4795c","Type":"ContainerDied","Data":"c1c4b1ee0f6189aa7da5e82704d143de6665c35c1309434f7d47ace0d8c1106a"} Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.228585 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" event={"ID":"d38aabb4-4429-4475-80b6-13babcaef4be","Type":"ContainerStarted","Data":"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82"} Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.252799 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" podStartSLOduration=3.2527833839999998 podStartE2EDuration="3.252783384s" podCreationTimestamp="2025-09-30 19:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:30.248982665 +0000 UTC m=+1099.869916142" watchObservedRunningTime="2025-09-30 19:49:30.252783384 +0000 UTC m=+1099.873716861" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.562927 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.570160 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.712255 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kfdjm-config-c68wb"] Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716624 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716673 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716766 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716771 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716788 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5rfl\" (UniqueName: \"kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716807 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716854 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt46q\" (UniqueName: \"kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q\") pod \"cdc6b8bd-8038-4943-8861-4b5898f5940a\" (UID: \"cdc6b8bd-8038-4943-8861-4b5898f5940a\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.716910 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run\") pod \"50f70a52-b554-466a-bb2c-e1518df4795c\" (UID: \"50f70a52-b554-466a-bb2c-e1518df4795c\") " Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.717261 4756 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.717292 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run" (OuterVolumeSpecName: "var-run") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.717315 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.717557 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.717773 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts" (OuterVolumeSpecName: "scripts") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.721461 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kfdjm-config-c68wb"] Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.722658 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl" (OuterVolumeSpecName: "kube-api-access-t5rfl") pod "50f70a52-b554-466a-bb2c-e1518df4795c" (UID: "50f70a52-b554-466a-bb2c-e1518df4795c"). InnerVolumeSpecName "kube-api-access-t5rfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.727581 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q" (OuterVolumeSpecName: "kube-api-access-qt46q") pod "cdc6b8bd-8038-4943-8861-4b5898f5940a" (UID: "cdc6b8bd-8038-4943-8861-4b5898f5940a"). InnerVolumeSpecName "kube-api-access-qt46q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818372 4756 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818414 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50f70a52-b554-466a-bb2c-e1518df4795c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818424 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5rfl\" (UniqueName: \"kubernetes.io/projected/50f70a52-b554-466a-bb2c-e1518df4795c-kube-api-access-t5rfl\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818435 4756 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818444 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt46q\" (UniqueName: \"kubernetes.io/projected/cdc6b8bd-8038-4943-8861-4b5898f5940a-kube-api-access-qt46q\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:30 crc kubenswrapper[4756]: I0930 19:49:30.818452 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/50f70a52-b554-466a-bb2c-e1518df4795c-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.129272 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50f70a52-b554-466a-bb2c-e1518df4795c" path="/var/lib/kubelet/pods/50f70a52-b554-466a-bb2c-e1518df4795c/volumes" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.236408 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f07e-account-create-ng8hh" event={"ID":"cdc6b8bd-8038-4943-8861-4b5898f5940a","Type":"ContainerDied","Data":"3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1"} Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.236947 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f7130adb38525d4915cceee564e73ca2a38612f34ac9a3ff6098ea4b564c6b1" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.236428 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f07e-account-create-ng8hh" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.238164 4756 scope.go:117] "RemoveContainer" containerID="c1c4b1ee0f6189aa7da5e82704d143de6665c35c1309434f7d47ace0d8c1106a" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.238184 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kfdjm-config-c68wb" Sep 30 19:49:31 crc kubenswrapper[4756]: I0930 19:49:31.238273 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.502862 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-mbntt"] Sep 30 19:49:32 crc kubenswrapper[4756]: E0930 19:49:32.503488 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc6b8bd-8038-4943-8861-4b5898f5940a" containerName="mariadb-account-create" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.503504 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc6b8bd-8038-4943-8861-4b5898f5940a" containerName="mariadb-account-create" Sep 30 19:49:32 crc kubenswrapper[4756]: E0930 19:49:32.503525 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f70a52-b554-466a-bb2c-e1518df4795c" containerName="ovn-config" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.503534 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f70a52-b554-466a-bb2c-e1518df4795c" containerName="ovn-config" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.503809 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="50f70a52-b554-466a-bb2c-e1518df4795c" containerName="ovn-config" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.503838 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc6b8bd-8038-4943-8861-4b5898f5940a" containerName="mariadb-account-create" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.504427 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.510804 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.511057 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6qrb6" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.511802 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-mbntt"] Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.646439 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.646525 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.646618 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twrm5\" (UniqueName: \"kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.646675 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.749013 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.749501 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.749732 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twrm5\" (UniqueName: \"kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.749903 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.755326 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.756729 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.757325 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.777978 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twrm5\" (UniqueName: \"kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5\") pod \"glance-db-sync-mbntt\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:32 crc kubenswrapper[4756]: I0930 19:49:32.834764 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-mbntt" Sep 30 19:49:33 crc kubenswrapper[4756]: I0930 19:49:33.419232 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-mbntt"] Sep 30 19:49:34 crc kubenswrapper[4756]: I0930 19:49:34.282215 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mbntt" event={"ID":"f08a96a9-79ca-4c30-b158-273876fe89b0","Type":"ContainerStarted","Data":"abd8f908f4b621d6dcb9f527a8aa16542f3329723ff5254abf156cb6ea303840"} Sep 30 19:49:37 crc kubenswrapper[4756]: I0930 19:49:37.921423 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:49:37 crc kubenswrapper[4756]: I0930 19:49:37.990672 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"] Sep 30 19:49:37 crc kubenswrapper[4756]: I0930 19:49:37.991051 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-nzmxq" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" containerID="cri-o://70b065679a123a719144084abb75b201a20b68a2f9edce0ba39bdc3f95c7a5de" gracePeriod=10 Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.322094 4756 generic.go:334] "Generic (PLEG): container finished" podID="305c7f11-e862-489d-ba96-2426b722652c" containerID="70b065679a123a719144084abb75b201a20b68a2f9edce0ba39bdc3f95c7a5de" exitCode=0 Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.322155 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nzmxq" event={"ID":"305c7f11-e862-489d-ba96-2426b722652c","Type":"ContainerDied","Data":"70b065679a123a719144084abb75b201a20b68a2f9edce0ba39bdc3f95c7a5de"} Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.550806 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.677163 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-nzmxq" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.897574 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.939024 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-89jrd"] Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.939951 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:38 crc kubenswrapper[4756]: I0930 19:49:38.953651 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-89jrd"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.051722 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-blvtr"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.053852 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.074670 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-blvtr"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.078518 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89szb\" (UniqueName: \"kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb\") pod \"cinder-db-create-89jrd\" (UID: \"a87a5a44-c4dc-4c43-9d51-f41f5d257b24\") " pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.180169 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n864\" (UniqueName: \"kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864\") pod \"barbican-db-create-blvtr\" (UID: \"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87\") " pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.180302 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89szb\" (UniqueName: \"kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb\") pod \"cinder-db-create-89jrd\" (UID: \"a87a5a44-c4dc-4c43-9d51-f41f5d257b24\") " pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.205322 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89szb\" (UniqueName: \"kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb\") pod \"cinder-db-create-89jrd\" (UID: \"a87a5a44-c4dc-4c43-9d51-f41f5d257b24\") " pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.216298 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-jsmf7"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.217288 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.221692 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.221925 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-crhkc" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.222105 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.222640 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.228033 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jsmf7"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.255120 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.280238 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-2hkb2"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.281857 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.282818 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n864\" (UniqueName: \"kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864\") pod \"barbican-db-create-blvtr\" (UID: \"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87\") " pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.297009 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2hkb2"] Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.315052 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n864\" (UniqueName: \"kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864\") pod \"barbican-db-create-blvtr\" (UID: \"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87\") " pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.372909 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.384879 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.384963 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.385020 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cb48\" (UniqueName: \"kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48\") pod \"neutron-db-create-2hkb2\" (UID: \"ed2e720d-f813-47b0-a459-9a2b77737ebc\") " pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.385057 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g79rs\" (UniqueName: \"kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.486232 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.486304 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 
19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.486344 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cb48\" (UniqueName: \"kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48\") pod \"neutron-db-create-2hkb2\" (UID: \"ed2e720d-f813-47b0-a459-9a2b77737ebc\") " pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.486371 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g79rs\" (UniqueName: \"kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.492102 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.496420 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.506049 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g79rs\" (UniqueName: \"kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs\") pod \"keystone-db-sync-jsmf7\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.507620 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cb48\" (UniqueName: \"kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48\") pod \"neutron-db-create-2hkb2\" (UID: \"ed2e720d-f813-47b0-a459-9a2b77737ebc\") " pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.549202 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:39 crc kubenswrapper[4756]: I0930 19:49:39.595290 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:41 crc kubenswrapper[4756]: I0930 19:49:41.539105 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:49:41 crc kubenswrapper[4756]: I0930 19:49:41.539446 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:49:43 crc kubenswrapper[4756]: I0930 19:49:43.677287 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-nzmxq" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Sep 30 19:49:48 crc kubenswrapper[4756]: I0930 19:49:48.677762 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-nzmxq" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Sep 30 19:49:48 crc kubenswrapper[4756]: I0930 19:49:48.678504 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-nzmxq" Sep 30 19:49:48 crc kubenswrapper[4756]: E0930 19:49:48.752205 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Sep 30 19:49:48 crc kubenswrapper[4756]: E0930 19:49:48.752363 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-twrm5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-mbntt_openstack(f08a96a9-79ca-4c30-b158-273876fe89b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 19:49:48 crc kubenswrapper[4756]: E0930 19:49:48.753580 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-mbntt" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.282483 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nzmxq" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.367861 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config\") pod \"305c7f11-e862-489d-ba96-2426b722652c\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.367913 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb\") pod \"305c7f11-e862-489d-ba96-2426b722652c\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.367932 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq79m\" (UniqueName: \"kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m\") pod \"305c7f11-e862-489d-ba96-2426b722652c\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.368092 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc\") pod \"305c7f11-e862-489d-ba96-2426b722652c\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.368153 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb\") pod \"305c7f11-e862-489d-ba96-2426b722652c\" (UID: \"305c7f11-e862-489d-ba96-2426b722652c\") " Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.390031 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m" (OuterVolumeSpecName: "kube-api-access-pq79m") pod "305c7f11-e862-489d-ba96-2426b722652c" (UID: "305c7f11-e862-489d-ba96-2426b722652c"). InnerVolumeSpecName "kube-api-access-pq79m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:49 crc kubenswrapper[4756]: W0930 19:49:49.405460 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda87a5a44_c4dc_4c43_9d51_f41f5d257b24.slice/crio-9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b WatchSource:0}: Error finding container 9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b: Status 404 returned error can't find the container with id 9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b Sep 30 19:49:49 crc kubenswrapper[4756]: W0930 19:49:49.408087 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded8f7aa8_9e70_4c1f_9ea8_187cd6900d87.slice/crio-ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d WatchSource:0}: Error finding container ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d: Status 404 returned error can't find the container with id ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.408856 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "305c7f11-e862-489d-ba96-2426b722652c" (UID: "305c7f11-e862-489d-ba96-2426b722652c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.414555 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-89jrd"] Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.419351 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-blvtr"] Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.420284 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "305c7f11-e862-489d-ba96-2426b722652c" (UID: "305c7f11-e862-489d-ba96-2426b722652c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.420438 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config" (OuterVolumeSpecName: "config") pod "305c7f11-e862-489d-ba96-2426b722652c" (UID: "305c7f11-e862-489d-ba96-2426b722652c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.429183 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nzmxq" event={"ID":"305c7f11-e862-489d-ba96-2426b722652c","Type":"ContainerDied","Data":"e15870dd12868d01320e27b3e586f284d7b81cb8c4e8212330928971ab1fdf84"} Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.429235 4756 scope.go:117] "RemoveContainer" containerID="70b065679a123a719144084abb75b201a20b68a2f9edce0ba39bdc3f95c7a5de" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.429349 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nzmxq" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.433995 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "305c7f11-e862-489d-ba96-2426b722652c" (UID: "305c7f11-e862-489d-ba96-2426b722652c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.442619 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89jrd" event={"ID":"a87a5a44-c4dc-4c43-9d51-f41f5d257b24","Type":"ContainerStarted","Data":"9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b"} Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.446062 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-blvtr" event={"ID":"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87","Type":"ContainerStarted","Data":"ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d"} Sep 30 19:49:49 crc kubenswrapper[4756]: E0930 19:49:49.448345 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-mbntt" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.469568 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.469604 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.469617 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.469625 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/305c7f11-e862-489d-ba96-2426b722652c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.469634 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq79m\" (UniqueName: \"kubernetes.io/projected/305c7f11-e862-489d-ba96-2426b722652c-kube-api-access-pq79m\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.481319 4756 scope.go:117] "RemoveContainer" containerID="fdc1d64f6bda7bb66eee9974fda6ebe1033da22e5e912bb54c3187aaa145b809" Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.507242 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jsmf7"] Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.513708 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2hkb2"] Sep 30 19:49:49 crc kubenswrapper[4756]: W0930 19:49:49.513737 4756 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c4f7feb_d6b0_4e8e_ba81_008f5796220b.slice/crio-c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d WatchSource:0}: Error finding container c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d: Status 404 returned error can't find the container with id c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d Sep 30 19:49:49 crc kubenswrapper[4756]: W0930 19:49:49.526907 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded2e720d_f813_47b0_a459_9a2b77737ebc.slice/crio-cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db WatchSource:0}: Error finding container cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db: Status 404 returned error can't find the container with id cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.925714 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"] Sep 30 19:49:49 crc kubenswrapper[4756]: I0930 19:49:49.942665 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nzmxq"] Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.466696 4756 generic.go:334] "Generic (PLEG): container finished" podID="a87a5a44-c4dc-4c43-9d51-f41f5d257b24" containerID="a126280212e36fcf459f93852c7632718b1fd8e6ce5b19d55fd989d6bd0a35f0" exitCode=0 Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.466762 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89jrd" event={"ID":"a87a5a44-c4dc-4c43-9d51-f41f5d257b24","Type":"ContainerDied","Data":"a126280212e36fcf459f93852c7632718b1fd8e6ce5b19d55fd989d6bd0a35f0"} Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.468618 4756 generic.go:334] "Generic (PLEG): container finished" podID="ed2e720d-f813-47b0-a459-9a2b77737ebc" containerID="ce70f4912d6a6d02a8b2a05e892b202161a2779d8b715e8a4d5f36c42277498f" exitCode=0 Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.468689 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2hkb2" event={"ID":"ed2e720d-f813-47b0-a459-9a2b77737ebc","Type":"ContainerDied","Data":"ce70f4912d6a6d02a8b2a05e892b202161a2779d8b715e8a4d5f36c42277498f"} Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.468725 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2hkb2" event={"ID":"ed2e720d-f813-47b0-a459-9a2b77737ebc","Type":"ContainerStarted","Data":"cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db"} Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.470238 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jsmf7" event={"ID":"7c4f7feb-d6b0-4e8e-ba81-008f5796220b","Type":"ContainerStarted","Data":"c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d"} Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.472009 4756 generic.go:334] "Generic (PLEG): container finished" podID="ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" containerID="2da5c1fb9beadd9318440b00bca43cb3548b37d0f21ce36f2e1f3ff26df6dda1" exitCode=0 Sep 30 19:49:50 crc kubenswrapper[4756]: I0930 19:49:50.472048 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-blvtr" 
event={"ID":"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87","Type":"ContainerDied","Data":"2da5c1fb9beadd9318440b00bca43cb3548b37d0f21ce36f2e1f3ff26df6dda1"} Sep 30 19:49:51 crc kubenswrapper[4756]: I0930 19:49:51.126827 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="305c7f11-e862-489d-ba96-2426b722652c" path="/var/lib/kubelet/pods/305c7f11-e862-489d-ba96-2426b722652c/volumes" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.498509 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89jrd" event={"ID":"a87a5a44-c4dc-4c43-9d51-f41f5d257b24","Type":"ContainerDied","Data":"9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b"} Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.500000 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b1c890ec72fe45e5e61809df88fc8196111d7f357c263c0a2a3b94ee73da33b" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.500933 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2hkb2" event={"ID":"ed2e720d-f813-47b0-a459-9a2b77737ebc","Type":"ContainerDied","Data":"cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db"} Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.501065 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf6a594be0b8b1dce490c6a28c6b50629d2bfb12e336ddd58b5c2331984213db" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.502527 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-blvtr" event={"ID":"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87","Type":"ContainerDied","Data":"ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d"} Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.502655 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae1b8593209b5cead767e4d591e7b00b8e2476a746284beb21d1e67192e3643d" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.605874 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.616503 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.628762 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.754469 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5n864\" (UniqueName: \"kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864\") pod \"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87\" (UID: \"ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87\") " Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.754610 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89szb\" (UniqueName: \"kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb\") pod \"a87a5a44-c4dc-4c43-9d51-f41f5d257b24\" (UID: \"a87a5a44-c4dc-4c43-9d51-f41f5d257b24\") " Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.754774 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cb48\" (UniqueName: \"kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48\") pod \"ed2e720d-f813-47b0-a459-9a2b77737ebc\" (UID: \"ed2e720d-f813-47b0-a459-9a2b77737ebc\") " Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.758739 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb" (OuterVolumeSpecName: "kube-api-access-89szb") pod "a87a5a44-c4dc-4c43-9d51-f41f5d257b24" (UID: "a87a5a44-c4dc-4c43-9d51-f41f5d257b24"). InnerVolumeSpecName "kube-api-access-89szb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.758775 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864" (OuterVolumeSpecName: "kube-api-access-5n864") pod "ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" (UID: "ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87"). InnerVolumeSpecName "kube-api-access-5n864". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.759910 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48" (OuterVolumeSpecName: "kube-api-access-9cb48") pod "ed2e720d-f813-47b0-a459-9a2b77737ebc" (UID: "ed2e720d-f813-47b0-a459-9a2b77737ebc"). InnerVolumeSpecName "kube-api-access-9cb48". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.856529 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cb48\" (UniqueName: \"kubernetes.io/projected/ed2e720d-f813-47b0-a459-9a2b77737ebc-kube-api-access-9cb48\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.856841 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5n864\" (UniqueName: \"kubernetes.io/projected/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87-kube-api-access-5n864\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:53 crc kubenswrapper[4756]: I0930 19:49:53.856909 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89szb\" (UniqueName: \"kubernetes.io/projected/a87a5a44-c4dc-4c43-9d51-f41f5d257b24-kube-api-access-89szb\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:54 crc kubenswrapper[4756]: I0930 19:49:54.513115 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2hkb2" Sep 30 19:49:54 crc kubenswrapper[4756]: I0930 19:49:54.514172 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jsmf7" event={"ID":"7c4f7feb-d6b0-4e8e-ba81-008f5796220b","Type":"ContainerStarted","Data":"2e64c63e69ea630f61c08c4bde77afdbb2cc31157f4f3a8f15cbff5d6d60f2ed"} Sep 30 19:49:54 crc kubenswrapper[4756]: I0930 19:49:54.514591 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-blvtr" Sep 30 19:49:54 crc kubenswrapper[4756]: I0930 19:49:54.514764 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89jrd" Sep 30 19:49:54 crc kubenswrapper[4756]: I0930 19:49:54.551209 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-jsmf7" podStartSLOduration=11.589470112 podStartE2EDuration="15.551187631s" podCreationTimestamp="2025-09-30 19:49:39 +0000 UTC" firstStartedPulling="2025-09-30 19:49:49.51556348 +0000 UTC m=+1119.136496947" lastFinishedPulling="2025-09-30 19:49:53.477280989 +0000 UTC m=+1123.098214466" observedRunningTime="2025-09-30 19:49:54.54042863 +0000 UTC m=+1124.161362117" watchObservedRunningTime="2025-09-30 19:49:54.551187631 +0000 UTC m=+1124.172121118" Sep 30 19:49:56 crc kubenswrapper[4756]: I0930 19:49:56.535440 4756 generic.go:334] "Generic (PLEG): container finished" podID="7c4f7feb-d6b0-4e8e-ba81-008f5796220b" containerID="2e64c63e69ea630f61c08c4bde77afdbb2cc31157f4f3a8f15cbff5d6d60f2ed" exitCode=0 Sep 30 19:49:56 crc kubenswrapper[4756]: I0930 19:49:56.535537 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jsmf7" event={"ID":"7c4f7feb-d6b0-4e8e-ba81-008f5796220b","Type":"ContainerDied","Data":"2e64c63e69ea630f61c08c4bde77afdbb2cc31157f4f3a8f15cbff5d6d60f2ed"} Sep 30 19:49:57 crc kubenswrapper[4756]: I0930 19:49:57.967096 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.130179 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g79rs\" (UniqueName: \"kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs\") pod \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.130367 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data\") pod \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.130393 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle\") pod \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\" (UID: \"7c4f7feb-d6b0-4e8e-ba81-008f5796220b\") " Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.135643 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs" (OuterVolumeSpecName: "kube-api-access-g79rs") pod "7c4f7feb-d6b0-4e8e-ba81-008f5796220b" (UID: "7c4f7feb-d6b0-4e8e-ba81-008f5796220b"). 
InnerVolumeSpecName "kube-api-access-g79rs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.160203 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c4f7feb-d6b0-4e8e-ba81-008f5796220b" (UID: "7c4f7feb-d6b0-4e8e-ba81-008f5796220b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.177794 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data" (OuterVolumeSpecName: "config-data") pod "7c4f7feb-d6b0-4e8e-ba81-008f5796220b" (UID: "7c4f7feb-d6b0-4e8e-ba81-008f5796220b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.232616 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g79rs\" (UniqueName: \"kubernetes.io/projected/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-kube-api-access-g79rs\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.232652 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.232660 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4f7feb-d6b0-4e8e-ba81-008f5796220b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.572529 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jsmf7" event={"ID":"7c4f7feb-d6b0-4e8e-ba81-008f5796220b","Type":"ContainerDied","Data":"c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d"} Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.572585 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c669d093059dc9955c1dbba8e8dd6db4484a5e398f6d4bd47ea66c8c9b8f514d" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.572642 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-jsmf7" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.847535 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vrp9q"] Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.847987 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87a5a44-c4dc-4c43-9d51-f41f5d257b24" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848016 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87a5a44-c4dc-4c43-9d51-f41f5d257b24" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.848037 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="init" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848046 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="init" Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.848065 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4f7feb-d6b0-4e8e-ba81-008f5796220b" containerName="keystone-db-sync" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848074 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4f7feb-d6b0-4e8e-ba81-008f5796220b" containerName="keystone-db-sync" Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.848092 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848100 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.848122 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed2e720d-f813-47b0-a459-9a2b77737ebc" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848130 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed2e720d-f813-47b0-a459-9a2b77737ebc" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: E0930 19:49:58.848146 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848154 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848376 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="305c7f11-e862-489d-ba96-2426b722652c" containerName="dnsmasq-dns" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848410 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed2e720d-f813-47b0-a459-9a2b77737ebc" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848425 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a87a5a44-c4dc-4c43-9d51-f41f5d257b24" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848437 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" containerName="mariadb-database-create" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.848463 4756 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="7c4f7feb-d6b0-4e8e-ba81-008f5796220b" containerName="keystone-db-sync" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.849085 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.851312 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.853833 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.855130 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.861419 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.861539 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.861704 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-crhkc" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.877915 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vrp9q"] Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.892125 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950499 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950592 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950638 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qx4l\" (UniqueName: \"kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950669 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950698 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn8f6\" (UniqueName: \"kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6\") pod 
\"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950747 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950882 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.950952 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.951001 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.951084 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.951137 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:58 crc kubenswrapper[4756]: I0930 19:49:58.951156 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.042013 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.050476 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.050754 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.053025 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054118 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054245 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054318 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054435 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.053476 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054601 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054655 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054752 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-4qx4l\" (UniqueName: \"kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054837 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.054918 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn8f6\" (UniqueName: \"kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.055256 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.055355 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.055394 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.055579 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.056312 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.060112 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.061027 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.061904 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.062258 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.062460 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.066573 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.073188 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qx4l\" (UniqueName: \"kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.074504 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn8f6\" (UniqueName: \"kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6\") pod \"dnsmasq-dns-55fff446b9-lk2zs\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.084334 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys\") pod \"keystone-bootstrap-vrp9q\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.158862 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.158913 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27mxj\" (UniqueName: \"kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.158946 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.159018 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.159050 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.159089 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.159125 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.168250 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.181194 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.219380 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.254390 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.255782 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265570 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27mxj\" (UniqueName: \"kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265654 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265718 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265743 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265795 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265858 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.265946 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.266936 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.268179 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.270456 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.273814 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.278550 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.278939 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.284174 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-w6sv7"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.285187 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.285609 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.288975 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.289188 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.289336 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fv677" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.303418 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-w6sv7"] Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.310069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27mxj\" (UniqueName: \"kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj\") pod \"ceilometer-0\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.367909 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.367952 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.367987 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.368035 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.368068 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.368113 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfwvh\" (UniqueName: \"kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.442509 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469757 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q78mc\" (UniqueName: \"kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469823 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfwvh\" (UniqueName: \"kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469885 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469905 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469927 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data\") pod \"placement-db-sync-w6sv7\" (UID: 
\"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469946 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.469972 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.470014 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.470030 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.470050 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.470080 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.471307 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.471649 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.471656 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: 
I0930 19:49:59.471790 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.472501 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:49:59 crc kubenswrapper[4756]: I0930 19:49:59.488005 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfwvh\" (UniqueName: \"kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh\") pod \"dnsmasq-dns-76fcf4b695-fzrxq\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.572799 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.572840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.572885 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q78mc\" (UniqueName: \"kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.572947 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.572966 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.573584 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.576212 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.576754 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.581969 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.588760 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q78mc\" (UniqueName: \"kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc\") pod \"placement-db-sync-w6sv7\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.656880 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.679572 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.706499 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:49:59.773931 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vrp9q"] Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.536379 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.587291 4756 generic.go:334] "Generic (PLEG): container finished" podID="8502815c-b72f-4e72-85d8-bced65073207" containerID="3d594e2a90057bf0fe14ae1c248b8a365db1648f734697caf53d1011047f4ea1" exitCode=0 Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.587337 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" event={"ID":"8502815c-b72f-4e72-85d8-bced65073207","Type":"ContainerDied","Data":"3d594e2a90057bf0fe14ae1c248b8a365db1648f734697caf53d1011047f4ea1"} Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.587384 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" event={"ID":"8502815c-b72f-4e72-85d8-bced65073207","Type":"ContainerStarted","Data":"958f20bf96840d6388e1874541be82ef2fe2e4f8dfee434feea3a1bc8eefbd26"} Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.589765 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerStarted","Data":"a0457f5eeef64a14111de38237283dd527a65d774216cb0bdb7cf2e958ac5207"} Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.591376 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vrp9q" 
event={"ID":"d8610de5-9b67-496f-b885-03555fac1ada","Type":"ContainerStarted","Data":"88a68782dbc943aac7628e21b900c0973de74c4cdd5afe3d6e4270e57ae343af"} Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.591428 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vrp9q" event={"ID":"d8610de5-9b67-496f-b885-03555fac1ada","Type":"ContainerStarted","Data":"7fb8d37bfbb078d2ffccf120ee8055d5841dc895b501ff2b905c4a004fffe824"} Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.609124 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-w6sv7"] Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.638629 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.642368 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vrp9q" podStartSLOduration=2.642352614 podStartE2EDuration="2.642352614s" podCreationTimestamp="2025-09-30 19:49:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:00.627313721 +0000 UTC m=+1130.248247208" watchObservedRunningTime="2025-09-30 19:50:00.642352614 +0000 UTC m=+1130.263286091" Sep 30 19:50:00 crc kubenswrapper[4756]: W0930 19:50:00.657221 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6f43c97_24c1_4b5c_9144_436afa885815.slice/crio-6fb5c383a933fced4ef19048e7a5d190b2a4e559f8e441d6c95e69ba70dba4fa WatchSource:0}: Error finding container 6fb5c383a933fced4ef19048e7a5d190b2a4e559f8e441d6c95e69ba70dba4fa: Status 404 returned error can't find the container with id 6fb5c383a933fced4ef19048e7a5d190b2a4e559f8e441d6c95e69ba70dba4fa Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.862949 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:50:00 crc kubenswrapper[4756]: I0930 19:50:00.901773 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001504 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn8f6\" (UniqueName: \"kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001606 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001698 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001715 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001749 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.001783 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb\") pod \"8502815c-b72f-4e72-85d8-bced65073207\" (UID: \"8502815c-b72f-4e72-85d8-bced65073207\") " Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.007104 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6" (OuterVolumeSpecName: "kube-api-access-fn8f6") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "kube-api-access-fn8f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.022747 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.023688 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.031997 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.034764 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.037817 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config" (OuterVolumeSpecName: "config") pod "8502815c-b72f-4e72-85d8-bced65073207" (UID: "8502815c-b72f-4e72-85d8-bced65073207"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105776 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105829 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105847 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105860 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105879 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8502815c-b72f-4e72-85d8-bced65073207-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.105891 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn8f6\" (UniqueName: \"kubernetes.io/projected/8502815c-b72f-4e72-85d8-bced65073207-kube-api-access-fn8f6\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.602721 4756 generic.go:334] "Generic (PLEG): container finished" podID="b6f43c97-24c1-4b5c-9144-436afa885815" 
containerID="cee228160ab1db6a58b9bd5fac0c626043c07faf2a1022d2195a361362ca7bfc" exitCode=0 Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.603015 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" event={"ID":"b6f43c97-24c1-4b5c-9144-436afa885815","Type":"ContainerDied","Data":"cee228160ab1db6a58b9bd5fac0c626043c07faf2a1022d2195a361362ca7bfc"} Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.603043 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" event={"ID":"b6f43c97-24c1-4b5c-9144-436afa885815","Type":"ContainerStarted","Data":"6fb5c383a933fced4ef19048e7a5d190b2a4e559f8e441d6c95e69ba70dba4fa"} Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.613383 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w6sv7" event={"ID":"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c","Type":"ContainerStarted","Data":"5971ce407c4b1bcbce687931d09709c6c3302ac2434951ba2e6710912db30e43"} Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.618440 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.618848 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-lk2zs" event={"ID":"8502815c-b72f-4e72-85d8-bced65073207","Type":"ContainerDied","Data":"958f20bf96840d6388e1874541be82ef2fe2e4f8dfee434feea3a1bc8eefbd26"} Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.618873 4756 scope.go:117] "RemoveContainer" containerID="3d594e2a90057bf0fe14ae1c248b8a365db1648f734697caf53d1011047f4ea1" Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.848897 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:50:01 crc kubenswrapper[4756]: I0930 19:50:01.854915 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-lk2zs"] Sep 30 19:50:02 crc kubenswrapper[4756]: I0930 19:50:02.629363 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" event={"ID":"b6f43c97-24c1-4b5c-9144-436afa885815","Type":"ContainerStarted","Data":"54ba83b5ec9e3df45a74815ad28e4405e4895da7ecbe0a4cdfcafe155890ed6a"} Sep 30 19:50:02 crc kubenswrapper[4756]: I0930 19:50:02.629618 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:02 crc kubenswrapper[4756]: I0930 19:50:02.649175 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" podStartSLOduration=3.649158232 podStartE2EDuration="3.649158232s" podCreationTimestamp="2025-09-30 19:49:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:02.647769055 +0000 UTC m=+1132.268702522" watchObservedRunningTime="2025-09-30 19:50:02.649158232 +0000 UTC m=+1132.270091709" Sep 30 19:50:03 crc kubenswrapper[4756]: I0930 19:50:03.128359 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8502815c-b72f-4e72-85d8-bced65073207" path="/var/lib/kubelet/pods/8502815c-b72f-4e72-85d8-bced65073207/volumes" Sep 30 19:50:03 crc kubenswrapper[4756]: I0930 19:50:03.640162 4756 generic.go:334] "Generic (PLEG): container finished" podID="d8610de5-9b67-496f-b885-03555fac1ada" 
containerID="88a68782dbc943aac7628e21b900c0973de74c4cdd5afe3d6e4270e57ae343af" exitCode=0 Sep 30 19:50:03 crc kubenswrapper[4756]: I0930 19:50:03.641137 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vrp9q" event={"ID":"d8610de5-9b67-496f-b885-03555fac1ada","Type":"ContainerDied","Data":"88a68782dbc943aac7628e21b900c0973de74c4cdd5afe3d6e4270e57ae343af"} Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.276990 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.400941 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.401433 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.401529 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.401572 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qx4l\" (UniqueName: \"kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.401622 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.401653 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle\") pod \"d8610de5-9b67-496f-b885-03555fac1ada\" (UID: \"d8610de5-9b67-496f-b885-03555fac1ada\") " Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.406786 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.406823 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.408639 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l" (OuterVolumeSpecName: "kube-api-access-4qx4l") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "kube-api-access-4qx4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.409266 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts" (OuterVolumeSpecName: "scripts") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.424322 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.447489 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data" (OuterVolumeSpecName: "config-data") pod "d8610de5-9b67-496f-b885-03555fac1ada" (UID: "d8610de5-9b67-496f-b885-03555fac1ada"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503630 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503668 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503681 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qx4l\" (UniqueName: \"kubernetes.io/projected/d8610de5-9b67-496f-b885-03555fac1ada-kube-api-access-4qx4l\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503694 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503705 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.503717 4756 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8610de5-9b67-496f-b885-03555fac1ada-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.679694 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-vrp9q" event={"ID":"d8610de5-9b67-496f-b885-03555fac1ada","Type":"ContainerDied","Data":"7fb8d37bfbb078d2ffccf120ee8055d5841dc895b501ff2b905c4a004fffe824"} Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.679739 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fb8d37bfbb078d2ffccf120ee8055d5841dc895b501ff2b905c4a004fffe824" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.679802 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vrp9q" Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.694825 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w6sv7" event={"ID":"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c","Type":"ContainerStarted","Data":"b36ede0872ef31860c685d708b8f806dc565231aea3bbc12a13fd67ea25ef4a3"} Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.703274 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerStarted","Data":"a70d9d815ab11d673e34ffbbbeab50cd29f0059de5b88903ab04b55ae2d01684"} Sep 30 19:50:06 crc kubenswrapper[4756]: I0930 19:50:06.723055 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-w6sv7" podStartSLOduration=2.192210827 podStartE2EDuration="7.723030042s" podCreationTimestamp="2025-09-30 19:49:59 +0000 UTC" firstStartedPulling="2025-09-30 19:50:00.632170818 +0000 UTC m=+1130.253104295" lastFinishedPulling="2025-09-30 19:50:06.162990033 +0000 UTC m=+1135.783923510" observedRunningTime="2025-09-30 19:50:06.711242794 +0000 UTC m=+1136.332176281" watchObservedRunningTime="2025-09-30 19:50:06.723030042 +0000 UTC m=+1136.343963519" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.358519 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vrp9q"] Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.367767 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vrp9q"] Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.455909 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dslgq"] Sep 30 19:50:07 crc kubenswrapper[4756]: E0930 19:50:07.456245 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8610de5-9b67-496f-b885-03555fac1ada" containerName="keystone-bootstrap" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.456260 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8610de5-9b67-496f-b885-03555fac1ada" containerName="keystone-bootstrap" Sep 30 19:50:07 crc kubenswrapper[4756]: E0930 19:50:07.456279 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8502815c-b72f-4e72-85d8-bced65073207" containerName="init" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.456285 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8502815c-b72f-4e72-85d8-bced65073207" containerName="init" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.456473 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8610de5-9b67-496f-b885-03555fac1ada" containerName="keystone-bootstrap" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.456504 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8502815c-b72f-4e72-85d8-bced65073207" containerName="init" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.457083 4756 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.460195 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.460527 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.460576 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.460622 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-crhkc" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.482500 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dslgq"] Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523047 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523409 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523481 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523515 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523786 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pl8p\" (UniqueName: \"kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.523882 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.625987 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.626035 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.626068 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.626090 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.627056 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pl8p\" (UniqueName: \"kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.627090 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.631566 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.631928 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.632582 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.642739 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 
19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.646423 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pl8p\" (UniqueName: \"kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.655084 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle\") pod \"keystone-bootstrap-dslgq\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.715107 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mbntt" event={"ID":"f08a96a9-79ca-4c30-b158-273876fe89b0","Type":"ContainerStarted","Data":"2efc7a3b56a345b58615fb38229a5dd9d08dd316e4746b7d2a81edbe3083c535"} Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.736711 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-mbntt" podStartSLOduration=2.981961894 podStartE2EDuration="35.736691678s" podCreationTimestamp="2025-09-30 19:49:32 +0000 UTC" firstStartedPulling="2025-09-30 19:49:33.412229022 +0000 UTC m=+1103.033162509" lastFinishedPulling="2025-09-30 19:50:06.166958806 +0000 UTC m=+1135.787892293" observedRunningTime="2025-09-30 19:50:07.731147793 +0000 UTC m=+1137.352081280" watchObservedRunningTime="2025-09-30 19:50:07.736691678 +0000 UTC m=+1137.357625155" Sep 30 19:50:07 crc kubenswrapper[4756]: I0930 19:50:07.777097 4756 util.go:30] "No sandbox for pod can be found. 
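
The pod_startup_latency_tracker records above expose the arithmetic behind the two durations: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from it. For glance-db-sync-mbntt: 19:50:07.736691678 − 19:49:32 = 35.736691678s end to end; the pull took 19:50:06.166958806 − 19:49:33.412229022 = 32.754729784s; and 35.736691678 − 32.754729784 = 2.981961894s, matching the logged podStartSLOduration. Where the pulling timestamps read 0001-01-01 00:00:00 (Go's zero time, as for keystone-bootstrap-dslgq below), no pull was needed and the two durations coincide. A minimal Go sketch of the same subtraction, with the timestamps copied from the glance record:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	parse := func(s string) time.Time {
    		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2025-09-30 19:49:32 +0000 UTC")             // podCreationTimestamp
    	firstPull := parse("2025-09-30 19:49:33.412229022 +0000 UTC") // firstStartedPulling
    	lastPull := parse("2025-09-30 19:50:06.166958806 +0000 UTC")  // lastFinishedPulling
    	running := parse("2025-09-30 19:50:07.736691678 +0000 UTC")   // observedRunningTime

    	e2e := running.Sub(created)          // podStartE2EDuration: 35.736691678s
    	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 2.981961894s
    	fmt.Println(e2e, slo)
    }
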
Need to start a new one" pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.499182 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dslgq"] Sep 30 19:50:08 crc kubenswrapper[4756]: W0930 19:50:08.541779 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbabfbb3a_b8d1_419a_92d9_37c5390467f6.slice/crio-23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19 WatchSource:0}: Error finding container 23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19: Status 404 returned error can't find the container with id 23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19 Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.724187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dslgq" event={"ID":"babfbb3a-b8d1-419a-92d9-37c5390467f6","Type":"ContainerStarted","Data":"edd2cc3b5058fc58938fed149c5e32347e36c9efc0f993f06ade0d7f6a0128e4"} Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.724423 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dslgq" event={"ID":"babfbb3a-b8d1-419a-92d9-37c5390467f6","Type":"ContainerStarted","Data":"23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19"} Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.753777 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerStarted","Data":"e8e1a8f4dd068c498f7eaf221ec9920ff488b7ff0b44001b0099dd0d4df7dac0"} Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.757809 4756 generic.go:334] "Generic (PLEG): container finished" podID="c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" containerID="b36ede0872ef31860c685d708b8f806dc565231aea3bbc12a13fd67ea25ef4a3" exitCode=0 Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.757844 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w6sv7" event={"ID":"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c","Type":"ContainerDied","Data":"b36ede0872ef31860c685d708b8f806dc565231aea3bbc12a13fd67ea25ef4a3"} Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.784345 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dslgq" podStartSLOduration=1.7843284640000001 podStartE2EDuration="1.784328464s" podCreationTimestamp="2025-09-30 19:50:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:08.754048312 +0000 UTC m=+1138.374981789" watchObservedRunningTime="2025-09-30 19:50:08.784328464 +0000 UTC m=+1138.405261941" Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.985345 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-e450-account-create-m4pnm"] Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.986339 4756 util.go:30] "No sandbox for pod can be found. 
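
The W-level "Failed to process watch event ... Status 404" line above is a benign race between cAdvisor and CRI-O: the container's cgroup (crio-23e9d966...) appears on disk, and the inotify watch fires, before the runtime has registered the container, so the follow-up status lookup returns NotFound. The kubelet treats such lookups as retryable rather than fatal; the same tolerance shows up below when RemoveContainer races an already-deleted container. A toy illustration of that pattern (hypothetical names, not kubelet source):

    package main

    import (
    	"errors"
    	"fmt"
    )

    // errNotFound stands in for the runtime's "can't find the container" reply.
    var errNotFound = errors.New("container not found")

    // lookupStatus simulates a status query that fails until the runtime has
    // registered the container.
    func lookupStatus(registered bool) error {
    	if !registered {
    		return errNotFound
    	}
    	return nil
    }

    func main() {
    	for attempt, registered := range []bool{false, true} {
    		if err := lookupStatus(registered); errors.Is(err, errNotFound) {
    			fmt.Printf("attempt %d: not registered yet, retrying\n", attempt)
    			continue
    		}
    		fmt.Printf("attempt %d: container visible\n", attempt)
    	}
    }
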
Need to start a new one" pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.990589 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 19:50:08 crc kubenswrapper[4756]: I0930 19:50:08.997941 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e450-account-create-m4pnm"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.080390 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7894t\" (UniqueName: \"kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t\") pod \"cinder-e450-account-create-m4pnm\" (UID: \"427631be-e1d9-46d5-8435-187eddd507c5\") " pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.125448 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8610de5-9b67-496f-b885-03555fac1ada" path="/var/lib/kubelet/pods/d8610de5-9b67-496f-b885-03555fac1ada/volumes" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.187698 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7894t\" (UniqueName: \"kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t\") pod \"cinder-e450-account-create-m4pnm\" (UID: \"427631be-e1d9-46d5-8435-187eddd507c5\") " pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.198540 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2b0c-account-create-vvcgf"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.199631 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.203853 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2b0c-account-create-vvcgf"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.204683 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.209790 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7894t\" (UniqueName: \"kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t\") pod \"cinder-e450-account-create-m4pnm\" (UID: \"427631be-e1d9-46d5-8435-187eddd507c5\") " pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.290641 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkb8c\" (UniqueName: \"kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c\") pod \"barbican-2b0c-account-create-vvcgf\" (UID: \"62c0f048-1f35-4c66-81ff-500f4b11e32c\") " pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.308240 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.392060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkb8c\" (UniqueName: \"kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c\") pod \"barbican-2b0c-account-create-vvcgf\" (UID: \"62c0f048-1f35-4c66-81ff-500f4b11e32c\") " pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.401312 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-cea4-account-create-kkbq5"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.402821 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.417950 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.434597 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cea4-account-create-kkbq5"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.455852 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkb8c\" (UniqueName: \"kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c\") pod \"barbican-2b0c-account-create-vvcgf\" (UID: \"62c0f048-1f35-4c66-81ff-500f4b11e32c\") " pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.497204 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gbzl\" (UniqueName: \"kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl\") pod \"neutron-cea4-account-create-kkbq5\" (UID: \"be948507-c9db-4853-be77-02918a4ad42c\") " pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.601373 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gbzl\" (UniqueName: \"kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl\") pod \"neutron-cea4-account-create-kkbq5\" (UID: \"be948507-c9db-4853-be77-02918a4ad42c\") " pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.609626 4756 util.go:30] "No sandbox for pod can be found. 
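
Note that the cinder/barbican/neutron account-create pods above mount exactly one volume each: the projected service-account token (kube-api-access-7894t, kube-api-access-nkb8c, kube-api-access-5gbzl), which bundles the bound token, the cluster CA, and the namespace file. A hand-rolled sketch of what the API server injects, using the volume name from the log; the expiry and source list are the usual Kubernetes defaults, not values read from this cluster:

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	expiry := int64(3607) // default bound-token lifetime
    	vol := corev1.Volume{
    		Name: "kube-api-access-7894t",
    		VolumeSource: corev1.VolumeSource{
    			Projected: &corev1.ProjectedVolumeSource{
    				Sources: []corev1.VolumeProjection{
    					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
    						Path: "token", ExpirationSeconds: &expiry}},
    					{ConfigMap: &corev1.ConfigMapProjection{
    						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
    						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}}}},
    				},
    			},
    		},
    	}
    	fmt.Println(vol.Name, "with", len(vol.VolumeSource.Projected.Sources), "projections")
    }
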
Need to start a new one" pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.625532 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gbzl\" (UniqueName: \"kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl\") pod \"neutron-cea4-account-create-kkbq5\" (UID: \"be948507-c9db-4853-be77-02918a4ad42c\") " pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.659582 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.727375 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.727671 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="dnsmasq-dns" containerID="cri-o://b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82" gracePeriod=10 Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.735518 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:09 crc kubenswrapper[4756]: I0930 19:50:09.774667 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e450-account-create-m4pnm"] Sep 30 19:50:09 crc kubenswrapper[4756]: W0930 19:50:09.801826 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod427631be_e1d9_46d5_8435_187eddd507c5.slice/crio-4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d WatchSource:0}: Error finding container 4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d: Status 404 returned error can't find the container with id 4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.126884 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2b0c-account-create-vvcgf"] Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.297185 4756 util.go:48] "No ready sandbox for pod can be found. 
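
"Killing container with a grace period ... gracePeriod=10" above is the standard termination sequence: the runtime delivers the container's stop signal (SIGTERM by default), waits up to the grace period, then SIGKILLs. The dnsmasq-dns container exits within the window, which is why the next PLEG event is a clean ContainerDied with exitCode=0. A self-contained Go sketch of the same pattern for an ordinary process (illustrative only; the kubelet delegates this to CRI-O):

    package main

    import (
    	"fmt"
    	"os/exec"
    	"syscall"
    	"time"
    )

    // stopWithGrace sends SIGTERM, waits up to the grace period (10s for the
    // dnsmasq-dns pod above), then force-kills.
    func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
    	_ = cmd.Process.Signal(syscall.SIGTERM)
    	done := make(chan error, 1)
    	go func() { done <- cmd.Wait() }()
    	select {
    	case err := <-done:
    		return err // exited within the grace period
    	case <-time.After(grace):
    		_ = cmd.Process.Kill() // grace period expired
    		return <-done
    	}
    }

    func main() {
    	cmd := exec.Command("sleep", "60")
    	if err := cmd.Start(); err != nil {
    		panic(err)
    	}
    	fmt.Println(stopWithGrace(cmd, 2*time.Second))
    }
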
Need to start a new one" pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.375058 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cea4-account-create-kkbq5"] Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.420601 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts\") pod \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.420937 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q78mc\" (UniqueName: \"kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc\") pod \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.420970 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs\") pod \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.421499 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle\") pod \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.421553 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data\") pod \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\" (UID: \"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.422157 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs" (OuterVolumeSpecName: "logs") pod "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" (UID: "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.427061 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc" (OuterVolumeSpecName: "kube-api-access-q78mc") pod "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" (UID: "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c"). InnerVolumeSpecName "kube-api-access-q78mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.427619 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts" (OuterVolumeSpecName: "scripts") pod "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" (UID: "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.473087 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.496557 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" (UID: "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.510467 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data" (OuterVolumeSpecName: "config-data") pod "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" (UID: "c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.522971 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.523007 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.523017 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q78mc\" (UniqueName: \"kubernetes.io/projected/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-kube-api-access-q78mc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.523028 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.523037 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624170 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624253 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624282 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc9bg\" (UniqueName: \"kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624302 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624321 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.624453 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb\") pod \"d38aabb4-4429-4475-80b6-13babcaef4be\" (UID: \"d38aabb4-4429-4475-80b6-13babcaef4be\") " Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.626919 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg" (OuterVolumeSpecName: "kube-api-access-gc9bg") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "kube-api-access-gc9bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.671266 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config" (OuterVolumeSpecName: "config") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.672136 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.672494 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.676970 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.689613 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d38aabb4-4429-4475-80b6-13babcaef4be" (UID: "d38aabb4-4429-4475-80b6-13babcaef4be"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726482 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726515 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726524 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc9bg\" (UniqueName: \"kubernetes.io/projected/d38aabb4-4429-4475-80b6-13babcaef4be-kube-api-access-gc9bg\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726535 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726545 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.726553 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d38aabb4-4429-4475-80b6-13babcaef4be-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.817324 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w6sv7" event={"ID":"c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c","Type":"ContainerDied","Data":"5971ce407c4b1bcbce687931d09709c6c3302ac2434951ba2e6710912db30e43"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.817371 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5971ce407c4b1bcbce687931d09709c6c3302ac2434951ba2e6710912db30e43" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.817453 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-w6sv7" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.825380 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cea4-account-create-kkbq5" event={"ID":"be948507-c9db-4853-be77-02918a4ad42c","Type":"ContainerStarted","Data":"9df0555f3c8e9c61006320811dc3865ff17aeb0ac002679468f323d52fd9ee05"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.825574 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cea4-account-create-kkbq5" event={"ID":"be948507-c9db-4853-be77-02918a4ad42c","Type":"ContainerStarted","Data":"fe58ad0c45a5160c16bb757e766e011140808d78adaef87ad5b8790bd9f950cf"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.835183 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2b0c-account-create-vvcgf" event={"ID":"62c0f048-1f35-4c66-81ff-500f4b11e32c","Type":"ContainerStarted","Data":"ca33aa0424ff5fc1f6488fdb5c4b14e6351ce319000039e3be72f4a12493d153"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.835273 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2b0c-account-create-vvcgf" event={"ID":"62c0f048-1f35-4c66-81ff-500f4b11e32c","Type":"ContainerStarted","Data":"c12f65c6a0eb82f2c6cdd77a6f7eb54186510e91ab5f1925e27b23ac7d353ffd"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.843337 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e450-account-create-m4pnm" event={"ID":"427631be-e1d9-46d5-8435-187eddd507c5","Type":"ContainerStarted","Data":"33c3ea5f36bb441ebdd67b050b98c2ab9aa572ab6e8284a59c2e24fc642c85b0"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.843401 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e450-account-create-m4pnm" event={"ID":"427631be-e1d9-46d5-8435-187eddd507c5","Type":"ContainerStarted","Data":"4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.850553 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-cea4-account-create-kkbq5" podStartSLOduration=1.850533244 podStartE2EDuration="1.850533244s" podCreationTimestamp="2025-09-30 19:50:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:10.847770821 +0000 UTC m=+1140.468704298" watchObservedRunningTime="2025-09-30 19:50:10.850533244 +0000 UTC m=+1140.471466721" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.861705 4756 generic.go:334] "Generic (PLEG): container finished" podID="d38aabb4-4429-4475-80b6-13babcaef4be" containerID="b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82" exitCode=0 Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.861761 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" event={"ID":"d38aabb4-4429-4475-80b6-13babcaef4be","Type":"ContainerDied","Data":"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.861780 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.861806 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-j99sb" event={"ID":"d38aabb4-4429-4475-80b6-13babcaef4be","Type":"ContainerDied","Data":"c63dd0b4d029952717074f1ac81ec3f463b9e34a46063293bfcea2d555f99ad1"} Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.861838 4756 scope.go:117] "RemoveContainer" containerID="b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.882214 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-e450-account-create-m4pnm" podStartSLOduration=2.882192551 podStartE2EDuration="2.882192551s" podCreationTimestamp="2025-09-30 19:50:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:10.870788633 +0000 UTC m=+1140.491722110" watchObservedRunningTime="2025-09-30 19:50:10.882192551 +0000 UTC m=+1140.503126028" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.883450 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:50:10 crc kubenswrapper[4756]: E0930 19:50:10.884789 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="init" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.884814 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="init" Sep 30 19:50:10 crc kubenswrapper[4756]: E0930 19:50:10.884833 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="dnsmasq-dns" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.884840 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="dnsmasq-dns" Sep 30 19:50:10 crc kubenswrapper[4756]: E0930 19:50:10.884853 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" containerName="placement-db-sync" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.884859 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" containerName="placement-db-sync" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.885052 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" containerName="placement-db-sync" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.885069 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" containerName="dnsmasq-dns" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.886152 4756 util.go:30] "No sandbox for pod can be found. 
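
The E-level RemoveStaleState lines above are housekeeping, not failures: when a replacement pod is admitted (placement-6bfdd55b6d-fqgk9 taking over from placement-db-sync-w6sv7, the new dnsmasq generation replacing pod d38aabb4...), the CPU and memory managers drop any per-container resource assignments whose pod UID is no longer active, logging at error level as they do so. In miniature (hypothetical structure, with pod UIDs truncated from the records above):

    package main

    import "fmt"

    // removeStaleState deletes per-container assignments whose pod UID is no
    // longer in the active set, echoing cpu_manager.go / memory_manager.go.
    func removeStaleState(assignments map[string]string, active map[string]bool) {
    	for key, podUID := range assignments {
    		if !active[podUID] {
    			fmt.Println("Deleted CPUSet assignment for", key)
    			delete(assignments, key)
    		}
    	}
    }

    func main() {
    	assignments := map[string]string{
    		"d8610de5/keystone-bootstrap": "d8610de5", // pod replaced earlier
    		"babfbb3a/keystone-bootstrap": "babfbb3a", // still active
    	}
    	removeStaleState(assignments, map[string]bool{"babfbb3a": true})
    	fmt.Println(assignments)
    }
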
Need to start a new one" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.888511 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.888903 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.889081 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.889241 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.889385 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fv677" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.902948 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.904318 4756 scope.go:117] "RemoveContainer" containerID="7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.906466 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-2b0c-account-create-vvcgf" podStartSLOduration=1.906443085 podStartE2EDuration="1.906443085s" podCreationTimestamp="2025-09-30 19:50:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:10.901875316 +0000 UTC m=+1140.522808793" watchObservedRunningTime="2025-09-30 19:50:10.906443085 +0000 UTC m=+1140.527376562" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.930439 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.942109 4756 scope.go:117] "RemoveContainer" containerID="b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.942211 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-j99sb"] Sep 30 19:50:10 crc kubenswrapper[4756]: E0930 19:50:10.942545 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82\": container with ID starting with b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82 not found: ID does not exist" containerID="b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.942579 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82"} err="failed to get container status \"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82\": rpc error: code = NotFound desc = could not find container \"b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82\": container with ID starting with b7465efabe8d474257df848949656b39daefde7e0aa4d0894afd80437344cc82 not found: ID does not exist" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.942612 4756 scope.go:117] "RemoveContainer" 
containerID="7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a" Sep 30 19:50:10 crc kubenswrapper[4756]: E0930 19:50:10.942878 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a\": container with ID starting with 7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a not found: ID does not exist" containerID="7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a" Sep 30 19:50:10 crc kubenswrapper[4756]: I0930 19:50:10.942913 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a"} err="failed to get container status \"7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a\": rpc error: code = NotFound desc = could not find container \"7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a\": container with ID starting with 7b903c8d2b5ed97e4455d6df5471cc8e329460b7d5b7b1253d3f7c759ec4d34a not found: ID does not exist" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032159 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032227 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032289 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032340 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032361 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.032379 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc 
kubenswrapper[4756]: I0930 19:50:11.032416 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k25s9\" (UniqueName: \"kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.137699 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.137754 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.137778 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.137811 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k25s9\" (UniqueName: \"kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.137850 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.138005 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.138064 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.139117 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.141761 4756 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"placement-scripts" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.142279 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.143050 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.143427 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.144992 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d38aabb4-4429-4475-80b6-13babcaef4be" path="/var/lib/kubelet/pods/d38aabb4-4429-4475-80b6-13babcaef4be/volumes" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.156115 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.160726 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.162183 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.169727 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.173797 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k25s9\" (UniqueName: \"kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.209263 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts\") pod \"placement-6bfdd55b6d-fqgk9\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.214086 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fv677" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.226544 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.539218 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.539641 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.539694 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.540472 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.540533 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c" gracePeriod=600 Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.717693 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.871615 4756 generic.go:334] "Generic (PLEG): container finished" podID="be948507-c9db-4853-be77-02918a4ad42c" containerID="9df0555f3c8e9c61006320811dc3865ff17aeb0ac002679468f323d52fd9ee05" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.871686 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cea4-account-create-kkbq5" event={"ID":"be948507-c9db-4853-be77-02918a4ad42c","Type":"ContainerDied","Data":"9df0555f3c8e9c61006320811dc3865ff17aeb0ac002679468f323d52fd9ee05"} Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.876643 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.876684 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c"} Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.876704 4756 scope.go:117] "RemoveContainer" containerID="2f76cebe68777fe0e73f317a28eb7b8968d88c664ac174239db340a67b731df2" Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.890777 4756 generic.go:334] "Generic (PLEG): container finished" podID="62c0f048-1f35-4c66-81ff-500f4b11e32c" 
containerID="ca33aa0424ff5fc1f6488fdb5c4b14e6351ce319000039e3be72f4a12493d153" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.890875 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2b0c-account-create-vvcgf" event={"ID":"62c0f048-1f35-4c66-81ff-500f4b11e32c","Type":"ContainerDied","Data":"ca33aa0424ff5fc1f6488fdb5c4b14e6351ce319000039e3be72f4a12493d153"} Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.893602 4756 generic.go:334] "Generic (PLEG): container finished" podID="427631be-e1d9-46d5-8435-187eddd507c5" containerID="33c3ea5f36bb441ebdd67b050b98c2ab9aa572ab6e8284a59c2e24fc642c85b0" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4756]: I0930 19:50:11.893649 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e450-account-create-m4pnm" event={"ID":"427631be-e1d9-46d5-8435-187eddd507c5","Type":"ContainerDied","Data":"33c3ea5f36bb441ebdd67b050b98c2ab9aa572ab6e8284a59c2e24fc642c85b0"} Sep 30 19:50:12 crc kubenswrapper[4756]: I0930 19:50:12.905042 4756 generic.go:334] "Generic (PLEG): container finished" podID="babfbb3a-b8d1-419a-92d9-37c5390467f6" containerID="edd2cc3b5058fc58938fed149c5e32347e36c9efc0f993f06ade0d7f6a0128e4" exitCode=0 Sep 30 19:50:12 crc kubenswrapper[4756]: I0930 19:50:12.905133 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dslgq" event={"ID":"babfbb3a-b8d1-419a-92d9-37c5390467f6","Type":"ContainerDied","Data":"edd2cc3b5058fc58938fed149c5e32347e36c9efc0f993f06ade0d7f6a0128e4"} Sep 30 19:50:13 crc kubenswrapper[4756]: I0930 19:50:13.916842 4756 generic.go:334] "Generic (PLEG): container finished" podID="f08a96a9-79ca-4c30-b158-273876fe89b0" containerID="2efc7a3b56a345b58615fb38229a5dd9d08dd316e4746b7d2a81edbe3083c535" exitCode=0 Sep 30 19:50:13 crc kubenswrapper[4756]: I0930 19:50:13.917310 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mbntt" event={"ID":"f08a96a9-79ca-4c30-b158-273876fe89b0","Type":"ContainerDied","Data":"2efc7a3b56a345b58615fb38229a5dd9d08dd316e4746b7d2a81edbe3083c535"} Sep 30 19:50:13 crc kubenswrapper[4756]: W0930 19:50:13.983440 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28dfe26f_4d95_4def_9500_a0410c260cf2.slice/crio-7faa56d1345f4b38e275d73a64f337502b9569aef157ee1fe2beefc111a2401b WatchSource:0}: Error finding container 7faa56d1345f4b38e275d73a64f337502b9569aef157ee1fe2beefc111a2401b: Status 404 returned error can't find the container with id 7faa56d1345f4b38e275d73a64f337502b9569aef157ee1fe2beefc111a2401b Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.082320 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.188729 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7894t\" (UniqueName: \"kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t\") pod \"427631be-e1d9-46d5-8435-187eddd507c5\" (UID: \"427631be-e1d9-46d5-8435-187eddd507c5\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.193205 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t" (OuterVolumeSpecName: "kube-api-access-7894t") pod "427631be-e1d9-46d5-8435-187eddd507c5" (UID: "427631be-e1d9-46d5-8435-187eddd507c5"). InnerVolumeSpecName "kube-api-access-7894t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.202442 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.210105 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.221125 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.290884 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gbzl\" (UniqueName: \"kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl\") pod \"be948507-c9db-4853-be77-02918a4ad42c\" (UID: \"be948507-c9db-4853-be77-02918a4ad42c\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291013 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys\") pod \"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291056 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle\") pod \"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291178 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts\") pod \"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291232 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys\") pod \"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291320 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pl8p\" (UniqueName: \"kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p\") pod 
\"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291361 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkb8c\" (UniqueName: \"kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c\") pod \"62c0f048-1f35-4c66-81ff-500f4b11e32c\" (UID: \"62c0f048-1f35-4c66-81ff-500f4b11e32c\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291417 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data\") pod \"babfbb3a-b8d1-419a-92d9-37c5390467f6\" (UID: \"babfbb3a-b8d1-419a-92d9-37c5390467f6\") " Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.291824 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7894t\" (UniqueName: \"kubernetes.io/projected/427631be-e1d9-46d5-8435-187eddd507c5-kube-api-access-7894t\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.296964 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts" (OuterVolumeSpecName: "scripts") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.296986 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl" (OuterVolumeSpecName: "kube-api-access-5gbzl") pod "be948507-c9db-4853-be77-02918a4ad42c" (UID: "be948507-c9db-4853-be77-02918a4ad42c"). InnerVolumeSpecName "kube-api-access-5gbzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.299602 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.301487 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.310523 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c" (OuterVolumeSpecName: "kube-api-access-nkb8c") pod "62c0f048-1f35-4c66-81ff-500f4b11e32c" (UID: "62c0f048-1f35-4c66-81ff-500f4b11e32c"). InnerVolumeSpecName "kube-api-access-nkb8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.310628 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p" (OuterVolumeSpecName: "kube-api-access-6pl8p") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "kube-api-access-6pl8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.319036 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data" (OuterVolumeSpecName: "config-data") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.326490 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "babfbb3a-b8d1-419a-92d9-37c5390467f6" (UID: "babfbb3a-b8d1-419a-92d9-37c5390467f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393556 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393594 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pl8p\" (UniqueName: \"kubernetes.io/projected/babfbb3a-b8d1-419a-92d9-37c5390467f6-kube-api-access-6pl8p\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393605 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkb8c\" (UniqueName: \"kubernetes.io/projected/62c0f048-1f35-4c66-81ff-500f4b11e32c-kube-api-access-nkb8c\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393614 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393622 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gbzl\" (UniqueName: \"kubernetes.io/projected/be948507-c9db-4853-be77-02918a4ad42c-kube-api-access-5gbzl\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393630 4756 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393640 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.393647 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/babfbb3a-b8d1-419a-92d9-37c5390467f6-scripts\") on node \"crc\" DevicePath \"\"" Sep 
30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.927964 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2b0c-account-create-vvcgf" event={"ID":"62c0f048-1f35-4c66-81ff-500f4b11e32c","Type":"ContainerDied","Data":"c12f65c6a0eb82f2c6cdd77a6f7eb54186510e91ab5f1925e27b23ac7d353ffd"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.928254 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c12f65c6a0eb82f2c6cdd77a6f7eb54186510e91ab5f1925e27b23ac7d353ffd" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.927981 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2b0c-account-create-vvcgf" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.929943 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cea4-account-create-kkbq5" event={"ID":"be948507-c9db-4853-be77-02918a4ad42c","Type":"ContainerDied","Data":"fe58ad0c45a5160c16bb757e766e011140808d78adaef87ad5b8790bd9f950cf"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.929980 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe58ad0c45a5160c16bb757e766e011140808d78adaef87ad5b8790bd9f950cf" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.930007 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cea4-account-create-kkbq5" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.932601 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerStarted","Data":"726428566e5874d33d6da463d98bf4cf9458d6a88a6a7ab44adbf99d1e3c4e25"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.934645 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dslgq" event={"ID":"babfbb3a-b8d1-419a-92d9-37c5390467f6","Type":"ContainerDied","Data":"23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.934683 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23e9d9668cee08f1aeafecf56ddb5d28d6d4d236881c4f0aee9497322146fd19" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.934651 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dslgq" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.936901 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e450-account-create-m4pnm" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.937017 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e450-account-create-m4pnm" event={"ID":"427631be-e1d9-46d5-8435-187eddd507c5","Type":"ContainerDied","Data":"4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.937048 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d70084e12e1e24fe80d8463ece4265b2ad3ccbf35ddcbb3ab04350cd1d10e5d" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.940584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerStarted","Data":"0a44e61d030a7208b33d6a4a2edac3b0fbac211badfde73f8a2bb902741ee1c5"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.940660 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.940682 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerStarted","Data":"a02c15c325b692d99424e55c3116131ba5bc7283ac4bd35486df44c56fb3a3f2"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.940702 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerStarted","Data":"7faa56d1345f4b38e275d73a64f337502b9569aef157ee1fe2beefc111a2401b"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.940724 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.956494 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea"} Sep 30 19:50:14 crc kubenswrapper[4756]: I0930 19:50:14.987723 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6bfdd55b6d-fqgk9" podStartSLOduration=4.987701739 podStartE2EDuration="4.987701739s" podCreationTimestamp="2025-09-30 19:50:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:14.979678079 +0000 UTC m=+1144.600611556" watchObservedRunningTime="2025-09-30 19:50:14.987701739 +0000 UTC m=+1144.608635226" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.027988 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:50:15 crc kubenswrapper[4756]: E0930 19:50:15.028429 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="babfbb3a-b8d1-419a-92d9-37c5390467f6" containerName="keystone-bootstrap" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028450 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="babfbb3a-b8d1-419a-92d9-37c5390467f6" containerName="keystone-bootstrap" Sep 30 19:50:15 crc kubenswrapper[4756]: E0930 19:50:15.028470 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="427631be-e1d9-46d5-8435-187eddd507c5" 
containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028478 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="427631be-e1d9-46d5-8435-187eddd507c5" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: E0930 19:50:15.028494 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c0f048-1f35-4c66-81ff-500f4b11e32c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028504 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c0f048-1f35-4c66-81ff-500f4b11e32c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: E0930 19:50:15.028527 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be948507-c9db-4853-be77-02918a4ad42c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028535 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="be948507-c9db-4853-be77-02918a4ad42c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028766 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="be948507-c9db-4853-be77-02918a4ad42c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028799 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="babfbb3a-b8d1-419a-92d9-37c5390467f6" containerName="keystone-bootstrap" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028818 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="427631be-e1d9-46d5-8435-187eddd507c5" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.028833 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c0f048-1f35-4c66-81ff-500f4b11e32c" containerName="mariadb-account-create" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.029534 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.033867 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.034688 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-crhkc" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.034827 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.034918 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.034995 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.035097 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.044465 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.116373 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.120812 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121089 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121184 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gv9l\" (UniqueName: \"kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121246 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121318 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys\") pod \"keystone-6889fc7587-jlhbj\" 
(UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121514 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.121619 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.222920 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.222973 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.223010 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.223036 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.223129 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.223146 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.223184 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 
crc kubenswrapper[4756]: I0930 19:50:15.223198 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gv9l\" (UniqueName: \"kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.228740 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.232172 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.233305 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.242242 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.242475 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.242794 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.261754 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.263079 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gv9l\" (UniqueName: \"kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l\") pod \"keystone-6889fc7587-jlhbj\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.375612 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.433454 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-mbntt" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.527447 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data\") pod \"f08a96a9-79ca-4c30-b158-273876fe89b0\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.527551 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle\") pod \"f08a96a9-79ca-4c30-b158-273876fe89b0\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.527578 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data\") pod \"f08a96a9-79ca-4c30-b158-273876fe89b0\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.527902 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twrm5\" (UniqueName: \"kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5\") pod \"f08a96a9-79ca-4c30-b158-273876fe89b0\" (UID: \"f08a96a9-79ca-4c30-b158-273876fe89b0\") " Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.535232 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5" (OuterVolumeSpecName: "kube-api-access-twrm5") pod "f08a96a9-79ca-4c30-b158-273876fe89b0" (UID: "f08a96a9-79ca-4c30-b158-273876fe89b0"). InnerVolumeSpecName "kube-api-access-twrm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.543466 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f08a96a9-79ca-4c30-b158-273876fe89b0" (UID: "f08a96a9-79ca-4c30-b158-273876fe89b0"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.565917 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f08a96a9-79ca-4c30-b158-273876fe89b0" (UID: "f08a96a9-79ca-4c30-b158-273876fe89b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.593283 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data" (OuterVolumeSpecName: "config-data") pod "f08a96a9-79ca-4c30-b158-273876fe89b0" (UID: "f08a96a9-79ca-4c30-b158-273876fe89b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.630045 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.630079 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.630093 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twrm5\" (UniqueName: \"kubernetes.io/projected/f08a96a9-79ca-4c30-b158-273876fe89b0-kube-api-access-twrm5\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.630106 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f08a96a9-79ca-4c30-b158-273876fe89b0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.818201 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:50:15 crc kubenswrapper[4756]: W0930 19:50:15.829874 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e0d0499_c40c_474b_83d5_f72ac55d859c.slice/crio-4d75a894500d1a1a588e5f2969ca4eb270930f2f5199afa2c832709272569525 WatchSource:0}: Error finding container 4d75a894500d1a1a588e5f2969ca4eb270930f2f5199afa2c832709272569525: Status 404 returned error can't find the container with id 4d75a894500d1a1a588e5f2969ca4eb270930f2f5199afa2c832709272569525 Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.976265 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6889fc7587-jlhbj" event={"ID":"4e0d0499-c40c-474b-83d5-f72ac55d859c","Type":"ContainerStarted","Data":"4d75a894500d1a1a588e5f2969ca4eb270930f2f5199afa2c832709272569525"} Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.979565 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mbntt" event={"ID":"f08a96a9-79ca-4c30-b158-273876fe89b0","Type":"ContainerDied","Data":"abd8f908f4b621d6dcb9f527a8aa16542f3329723ff5254abf156cb6ea303840"} Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.979601 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abd8f908f4b621d6dcb9f527a8aa16542f3329723ff5254abf156cb6ea303840" Sep 30 19:50:15 crc kubenswrapper[4756]: I0930 19:50:15.979617 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-mbntt" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.330449 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:50:16 crc kubenswrapper[4756]: E0930 19:50:16.331119 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" containerName="glance-db-sync" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.331134 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" containerName="glance-db-sync" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.331350 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" containerName="glance-db-sync" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.332373 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.349742 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.450959 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.451064 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.451083 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbrr\" (UniqueName: \"kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.451358 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.451478 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.451556 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " 
pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553077 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553158 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcbrr\" (UniqueName: \"kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553525 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553559 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553594 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.553638 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.554200 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.554848 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.555298 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.555377 
4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.556187 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.573154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbrr\" (UniqueName: \"kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr\") pod \"dnsmasq-dns-8b5c85b87-tkqp5\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") " pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.650950 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.990619 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6889fc7587-jlhbj" event={"ID":"4e0d0499-c40c-474b-83d5-f72ac55d859c","Type":"ContainerStarted","Data":"941c1f888df56ddf960e94e48f17bb2adec0251bc3c8d8f1dd8e4c62d928796e"} Sep 30 19:50:16 crc kubenswrapper[4756]: I0930 19:50:16.990751 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.008949 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6889fc7587-jlhbj" podStartSLOduration=3.008932364 podStartE2EDuration="3.008932364s" podCreationTimestamp="2025-09-30 19:50:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:17.007046715 +0000 UTC m=+1146.627980192" watchObservedRunningTime="2025-09-30 19:50:17.008932364 +0000 UTC m=+1146.629865841" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.258272 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.259687 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.261500 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.261643 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6qrb6" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.270944 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.271697 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370153 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370213 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7m4x\" (UniqueName: \"kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370272 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370298 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370383 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370432 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.370452 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " 
pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.461701 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.463020 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.465137 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.472576 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.472706 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.472748 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7m4x\" (UniqueName: \"kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.472771 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.472794 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.474122 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.474128 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.474269 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.474465 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.475322 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.483618 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.491768 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.493943 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.503713 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7m4x\" (UniqueName: \"kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.512248 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.519665 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts\") pod \"glance-default-external-api-0\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575382 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575748 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575778 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crbrr\" (UniqueName: \"kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575853 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575953 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575981 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.575997 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.618791 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677449 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crbrr\" (UniqueName: \"kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677523 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677595 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677615 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677629 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677663 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677685 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.677947 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.678355 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc 
kubenswrapper[4756]: I0930 19:50:17.678526 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.683152 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.685162 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.692605 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.695658 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crbrr\" (UniqueName: \"kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.702288 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:17 crc kubenswrapper[4756]: I0930 19:50:17.862760 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:18 crc kubenswrapper[4756]: I0930 19:50:18.480328 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:50:18 crc kubenswrapper[4756]: I0930 19:50:18.562855 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.007787 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.008283 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerStarted","Data":"34911a2259ecb59426dba708f67beae31b4c822227b583a0988ec3d5d7e89965"} Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.009352 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" event={"ID":"62aa5559-f0c5-4603-95ec-795ff309390c","Type":"ContainerStarted","Data":"a0f95a21abeea8cd6122320d3f13c4041f9f67af3e610f59996b6c3c9859e35d"} Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.095869 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.265443 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-j57gg"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.266678 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.270830 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.271229 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bk8mp" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.271468 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319267 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319324 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319347 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319374 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319421 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxmgt\" (UniqueName: \"kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.319455 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.344548 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-j57gg"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422811 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422850 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxmgt\" (UniqueName: \"kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422886 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.422982 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.423048 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.432996 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.433087 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.434972 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.435120 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.452796 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxmgt\" (UniqueName: \"kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt\") pod \"cinder-db-sync-j57gg\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.561045 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.570081 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-8hkvg"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.571643 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.574848 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.575021 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mv7v6" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.594366 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-j57gg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.597134 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8hkvg"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.626775 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v9dx\" (UniqueName: \"kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.626846 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.626942 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.655899 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-pljns"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.658445 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.662700 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2jxvh" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.662753 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.662897 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.664377 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pljns"] Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729044 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729098 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v9dx\" (UniqueName: \"kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729212 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " 
pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729252 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729320 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcf85\" (UniqueName: \"kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.729354 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.748124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.748288 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.753152 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v9dx\" (UniqueName: \"kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx\") pod \"barbican-db-sync-8hkvg\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.830938 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.831022 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcf85\" (UniqueName: \"kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.831056 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.834968 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.835978 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.846266 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcf85\" (UniqueName: \"kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85\") pod \"neutron-db-sync-pljns\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.894004 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:19 crc kubenswrapper[4756]: I0930 19:50:19.975442 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:20 crc kubenswrapper[4756]: I0930 19:50:20.019024 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerStarted","Data":"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb"} Sep 30 19:50:20 crc kubenswrapper[4756]: I0930 19:50:20.021033 4756 generic.go:334] "Generic (PLEG): container finished" podID="62aa5559-f0c5-4603-95ec-795ff309390c" containerID="31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4" exitCode=0 Sep 30 19:50:20 crc kubenswrapper[4756]: I0930 19:50:20.021061 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" event={"ID":"62aa5559-f0c5-4603-95ec-795ff309390c","Type":"ContainerDied","Data":"31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4"} Sep 30 19:50:23 crc kubenswrapper[4756]: W0930 19:50:23.562938 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8614c78e_58de_4a39_80a4_ba8f9d068ebb.slice/crio-81d1b2bc855444ce17c8d46903f65fd53acac2549f34bde195ae52835385a6cb WatchSource:0}: Error finding container 81d1b2bc855444ce17c8d46903f65fd53acac2549f34bde195ae52835385a6cb: Status 404 returned error can't find the container with id 81d1b2bc855444ce17c8d46903f65fd53acac2549f34bde195ae52835385a6cb Sep 30 19:50:24 crc kubenswrapper[4756]: I0930 19:50:24.058565 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerStarted","Data":"81d1b2bc855444ce17c8d46903f65fd53acac2549f34bde195ae52835385a6cb"} Sep 30 19:50:24 crc kubenswrapper[4756]: I0930 19:50:24.412606 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-j57gg"] Sep 30 19:50:24 crc kubenswrapper[4756]: W0930 19:50:24.433534 4756 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f0f53de_f545_431e_9901_e45eedc73bf9.slice/crio-b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549 WatchSource:0}: Error finding container b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549: Status 404 returned error can't find the container with id b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549 Sep 30 19:50:24 crc kubenswrapper[4756]: I0930 19:50:24.719371 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8hkvg"] Sep 30 19:50:24 crc kubenswrapper[4756]: W0930 19:50:24.724096 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb5b7a75_2b20_4dc3_bd63_e29dbb6cdba2.slice/crio-f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466 WatchSource:0}: Error finding container f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466: Status 404 returned error can't find the container with id f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466 Sep 30 19:50:24 crc kubenswrapper[4756]: I0930 19:50:24.799436 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pljns"] Sep 30 19:50:24 crc kubenswrapper[4756]: W0930 19:50:24.806033 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d1a09a3_9f2d_41ec_a2de_009e4835154e.slice/crio-7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a WatchSource:0}: Error finding container 7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a: Status 404 returned error can't find the container with id 7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.072984 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8hkvg" event={"ID":"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2","Type":"ContainerStarted","Data":"f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.075897 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pljns" event={"ID":"2d1a09a3-9f2d-41ec-a2de-009e4835154e","Type":"ContainerStarted","Data":"88bc27040e35d9df953d55c08c8b378a2f16d65a8a8f0afd059a1be9ec84599d"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.075942 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pljns" event={"ID":"2d1a09a3-9f2d-41ec-a2de-009e4835154e","Type":"ContainerStarted","Data":"7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.079975 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerStarted","Data":"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.080155 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-httpd" containerID="cri-o://0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.080160 4756 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-external-api-0" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-log" containerID="cri-o://d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083660 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerStarted","Data":"6a13740313829bf263916b0f39a53603833681d23155291a3e296376914e598f"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083756 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083745 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-central-agent" containerID="cri-o://a70d9d815ab11d673e34ffbbbeab50cd29f0059de5b88903ab04b55ae2d01684" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083785 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="sg-core" containerID="cri-o://726428566e5874d33d6da463d98bf4cf9458d6a88a6a7ab44adbf99d1e3c4e25" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083810 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-notification-agent" containerID="cri-o://e8e1a8f4dd068c498f7eaf221ec9920ff488b7ff0b44001b0099dd0d4df7dac0" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.083784 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="proxy-httpd" containerID="cri-o://6a13740313829bf263916b0f39a53603833681d23155291a3e296376914e598f" gracePeriod=30 Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.095557 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" event={"ID":"62aa5559-f0c5-4603-95ec-795ff309390c","Type":"ContainerStarted","Data":"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.095911 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.101785 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-pljns" podStartSLOduration=6.101767949 podStartE2EDuration="6.101767949s" podCreationTimestamp="2025-09-30 19:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:25.092015664 +0000 UTC m=+1154.712949141" watchObservedRunningTime="2025-09-30 19:50:25.101767949 +0000 UTC m=+1154.722701426" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.101950 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j57gg" event={"ID":"0f0f53de-f545-431e-9901-e45eedc73bf9","Type":"ContainerStarted","Data":"b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.107264 4756 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerStarted","Data":"b71dfa87c8da4823c8486c18ec442810523e91946df9693d7b46568418c78a27"} Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.120376 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.120356895 podStartE2EDuration="9.120356895s" podCreationTimestamp="2025-09-30 19:50:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:25.116437703 +0000 UTC m=+1154.737371180" watchObservedRunningTime="2025-09-30 19:50:25.120356895 +0000 UTC m=+1154.741290372" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.145737 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3850578479999998 podStartE2EDuration="26.145708438s" podCreationTimestamp="2025-09-30 19:49:59 +0000 UTC" firstStartedPulling="2025-09-30 19:50:00.547779712 +0000 UTC m=+1130.168713189" lastFinishedPulling="2025-09-30 19:50:24.308430292 +0000 UTC m=+1153.929363779" observedRunningTime="2025-09-30 19:50:25.144159367 +0000 UTC m=+1154.765092844" watchObservedRunningTime="2025-09-30 19:50:25.145708438 +0000 UTC m=+1154.766641955" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.181849 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" podStartSLOduration=9.181829982 podStartE2EDuration="9.181829982s" podCreationTimestamp="2025-09-30 19:50:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:25.171175914 +0000 UTC m=+1154.792109391" watchObservedRunningTime="2025-09-30 19:50:25.181829982 +0000 UTC m=+1154.802763469" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.709419 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757557 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757696 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757722 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757754 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7m4x\" (UniqueName: \"kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757827 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.757977 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.758005 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts\") pod \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\" (UID: \"729dd130-94c6-4995-b9c8-c2ebdbdc5886\") " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.759199 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.759422 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs" (OuterVolumeSpecName: "logs") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.764607 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts" (OuterVolumeSpecName: "scripts") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.766469 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.766581 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x" (OuterVolumeSpecName: "kube-api-access-z7m4x") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "kube-api-access-z7m4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.785656 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.813080 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data" (OuterVolumeSpecName: "config-data") pod "729dd130-94c6-4995-b9c8-c2ebdbdc5886" (UID: "729dd130-94c6-4995-b9c8-c2ebdbdc5886"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.859963 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.859996 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.860006 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.860014 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729dd130-94c6-4995-b9c8-c2ebdbdc5886-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.860024 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/729dd130-94c6-4995-b9c8-c2ebdbdc5886-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.860032 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7m4x\" (UniqueName: \"kubernetes.io/projected/729dd130-94c6-4995-b9c8-c2ebdbdc5886-kube-api-access-z7m4x\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.860074 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.878369 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 19:50:25 crc kubenswrapper[4756]: I0930 19:50:25.961162 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.119659 4756 generic.go:334] "Generic (PLEG): container finished" podID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerID="0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" exitCode=0 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.119978 4756 generic.go:334] "Generic (PLEG): container finished" podID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerID="d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" exitCode=143 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.120022 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerDied","Data":"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.120049 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerDied","Data":"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.120060 
4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"729dd130-94c6-4995-b9c8-c2ebdbdc5886","Type":"ContainerDied","Data":"34911a2259ecb59426dba708f67beae31b4c822227b583a0988ec3d5d7e89965"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.120077 4756 scope.go:117] "RemoveContainer" containerID="0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.120196 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130640 4756 generic.go:334] "Generic (PLEG): container finished" podID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerID="6a13740313829bf263916b0f39a53603833681d23155291a3e296376914e598f" exitCode=0 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130677 4756 generic.go:334] "Generic (PLEG): container finished" podID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerID="726428566e5874d33d6da463d98bf4cf9458d6a88a6a7ab44adbf99d1e3c4e25" exitCode=2 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130687 4756 generic.go:334] "Generic (PLEG): container finished" podID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerID="a70d9d815ab11d673e34ffbbbeab50cd29f0059de5b88903ab04b55ae2d01684" exitCode=0 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130736 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerDied","Data":"6a13740313829bf263916b0f39a53603833681d23155291a3e296376914e598f"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130765 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerDied","Data":"726428566e5874d33d6da463d98bf4cf9458d6a88a6a7ab44adbf99d1e3c4e25"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.130778 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerDied","Data":"a70d9d815ab11d673e34ffbbbeab50cd29f0059de5b88903ab04b55ae2d01684"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.142372 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerStarted","Data":"d0c64683b8a345d8332f27cc5f0ab478e6ad32b843012f24148e03833dd11a41"} Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.142706 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-log" containerID="cri-o://b71dfa87c8da4823c8486c18ec442810523e91946df9693d7b46568418c78a27" gracePeriod=30 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.142740 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-httpd" containerID="cri-o://d0c64683b8a345d8332f27cc5f0ab478e6ad32b843012f24148e03833dd11a41" gracePeriod=30 Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.169967 4756 scope.go:117] "RemoveContainer" containerID="d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.174901 
4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=10.174883961 podStartE2EDuration="10.174883961s" podCreationTimestamp="2025-09-30 19:50:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:26.170480535 +0000 UTC m=+1155.791414012" watchObservedRunningTime="2025-09-30 19:50:26.174883961 +0000 UTC m=+1155.795817438" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.191382 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.199654 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.212586 4756 scope.go:117] "RemoveContainer" containerID="0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.215732 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:26 crc kubenswrapper[4756]: E0930 19:50:26.216172 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-httpd" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216238 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-httpd" Sep 30 19:50:26 crc kubenswrapper[4756]: E0930 19:50:26.216327 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-log" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216376 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-log" Sep 30 19:50:26 crc kubenswrapper[4756]: E0930 19:50:26.216511 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74\": container with ID starting with 0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74 not found: ID does not exist" containerID="0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216560 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74"} err="failed to get container status \"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74\": rpc error: code = NotFound desc = could not find container \"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74\": container with ID starting with 0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74 not found: ID does not exist" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216590 4756 scope.go:117] "RemoveContainer" containerID="d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216763 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-httpd" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.216840 4756 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" containerName="glance-log" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.217783 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: E0930 19:50:26.217906 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb\": container with ID starting with d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb not found: ID does not exist" containerID="d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.217945 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb"} err="failed to get container status \"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb\": rpc error: code = NotFound desc = could not find container \"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb\": container with ID starting with d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb not found: ID does not exist" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.217968 4756 scope.go:117] "RemoveContainer" containerID="0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.218324 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74"} err="failed to get container status \"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74\": rpc error: code = NotFound desc = could not find container \"0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74\": container with ID starting with 0a2918b42232ba4d6a87cbca74c72233fb8d900d594e97e5509f98e9ab23dc74 not found: ID does not exist" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.218381 4756 scope.go:117] "RemoveContainer" containerID="d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.222183 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb"} err="failed to get container status \"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb\": rpc error: code = NotFound desc = could not find container \"d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb\": container with ID starting with d642f68603b5ff0f9120c79a0129e4e63a55b1eb639cdd4896d863495472c8fb not found: ID does not exist" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.225101 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.225748 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.254603 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267619 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267657 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267683 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267702 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267832 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.267904 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.268005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.268081 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxkt2\" (UniqueName: \"kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.369880 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.369937 4756 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.369975 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370000 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370093 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370143 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370185 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxkt2\" (UniqueName: \"kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.370891 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.371478 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.372518 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.376069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.379679 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.379832 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.381115 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.387717 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxkt2\" (UniqueName: \"kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.407149 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " pod="openstack/glance-default-external-api-0" Sep 30 19:50:26 crc kubenswrapper[4756]: I0930 19:50:26.536224 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.127852 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="729dd130-94c6-4995-b9c8-c2ebdbdc5886" path="/var/lib/kubelet/pods/729dd130-94c6-4995-b9c8-c2ebdbdc5886/volumes" Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.128969 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.155326 4756 generic.go:334] "Generic (PLEG): container finished" podID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerID="d0c64683b8a345d8332f27cc5f0ab478e6ad32b843012f24148e03833dd11a41" exitCode=0 Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.155593 4756 generic.go:334] "Generic (PLEG): container finished" podID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerID="b71dfa87c8da4823c8486c18ec442810523e91946df9693d7b46568418c78a27" exitCode=143 Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.155430 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerDied","Data":"d0c64683b8a345d8332f27cc5f0ab478e6ad32b843012f24148e03833dd11a41"} Sep 30 19:50:27 crc kubenswrapper[4756]: I0930 19:50:27.155863 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerDied","Data":"b71dfa87c8da4823c8486c18ec442810523e91946df9693d7b46568418c78a27"} Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.218375 4756 generic.go:334] "Generic (PLEG): container finished" podID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerID="e8e1a8f4dd068c498f7eaf221ec9920ff488b7ff0b44001b0099dd0d4df7dac0" exitCode=0 Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.218443 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerDied","Data":"e8e1a8f4dd068c498f7eaf221ec9920ff488b7ff0b44001b0099dd0d4df7dac0"} Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.652641 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.731321 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.731605 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" containerID="cri-o://54ba83b5ec9e3df45a74815ad28e4405e4895da7ecbe0a4cdfcafe155890ed6a" gracePeriod=10 Sep 30 19:50:31 crc kubenswrapper[4756]: I0930 19:50:31.960297 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.008796 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.008914 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.008970 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.009032 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.009076 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.009100 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crbrr\" (UniqueName: \"kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.009135 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run\") pod \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\" (UID: \"8614c78e-58de-4a39-80a4-ba8f9d068ebb\") " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.010562 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs" (OuterVolumeSpecName: "logs") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.010732 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.016565 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.016612 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr" (OuterVolumeSpecName: "kube-api-access-crbrr") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "kube-api-access-crbrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.028252 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts" (OuterVolumeSpecName: "scripts") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.078744 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.083940 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data" (OuterVolumeSpecName: "config-data") pod "8614c78e-58de-4a39-80a4-ba8f9d068ebb" (UID: "8614c78e-58de-4a39-80a4-ba8f9d068ebb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111528 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111558 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111569 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111579 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8614c78e-58de-4a39-80a4-ba8f9d068ebb-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111614 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111623 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8614c78e-58de-4a39-80a4-ba8f9d068ebb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.111631 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crbrr\" (UniqueName: \"kubernetes.io/projected/8614c78e-58de-4a39-80a4-ba8f9d068ebb-kube-api-access-crbrr\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.130959 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.213560 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.242264 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerStarted","Data":"f3d99f1858dbcab52e03f81f1b706ca0bc16f951aa653b8ce609e8ef2c640ef2"} Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.245107 4756 generic.go:334] "Generic (PLEG): container finished" podID="b6f43c97-24c1-4b5c-9144-436afa885815" containerID="54ba83b5ec9e3df45a74815ad28e4405e4895da7ecbe0a4cdfcafe155890ed6a" exitCode=0 Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.245160 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" event={"ID":"b6f43c97-24c1-4b5c-9144-436afa885815","Type":"ContainerDied","Data":"54ba83b5ec9e3df45a74815ad28e4405e4895da7ecbe0a4cdfcafe155890ed6a"} Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.254901 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8614c78e-58de-4a39-80a4-ba8f9d068ebb","Type":"ContainerDied","Data":"81d1b2bc855444ce17c8d46903f65fd53acac2549f34bde195ae52835385a6cb"} Sep 30 
19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.254949 4756 scope.go:117] "RemoveContainer" containerID="d0c64683b8a345d8332f27cc5f0ab478e6ad32b843012f24148e03833dd11a41" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.255070 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.289931 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.296509 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.315165 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:32 crc kubenswrapper[4756]: E0930 19:50:32.315528 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-log" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.315541 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-log" Sep 30 19:50:32 crc kubenswrapper[4756]: E0930 19:50:32.315580 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-httpd" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.315587 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-httpd" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.315750 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-log" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.315764 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" containerName="glance-httpd" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.316752 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.321137 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.321266 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.349024 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518556 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518627 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518660 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518690 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518721 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518762 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p29gg\" (UniqueName: \"kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518801 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.518820 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622083 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p29gg\" (UniqueName: \"kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622287 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622308 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622444 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622594 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622650 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622679 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622747 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.622789 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.623249 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.623303 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.627193 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.628796 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.628909 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.630939 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.646294 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p29gg\" (UniqueName: \"kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.651065 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") " pod="openstack/glance-default-internal-api-0" Sep 30 19:50:32 crc kubenswrapper[4756]: I0930 19:50:32.660212 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:33 crc kubenswrapper[4756]: I0930 19:50:33.126980 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8614c78e-58de-4a39-80a4-ba8f9d068ebb" path="/var/lib/kubelet/pods/8614c78e-58de-4a39-80a4-ba8f9d068ebb/volumes" Sep 30 19:50:34 crc kubenswrapper[4756]: I0930 19:50:34.657975 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused" Sep 30 19:50:39 crc kubenswrapper[4756]: I0930 19:50:39.673364 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused" Sep 30 19:50:42 crc kubenswrapper[4756]: E0930 19:50:42.048638 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 30 19:50:42 crc kubenswrapper[4756]: E0930 19:50:42.049182 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gxmgt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,S
tdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-j57gg_openstack(0f0f53de-f545-431e-9901-e45eedc73bf9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 19:50:42 crc kubenswrapper[4756]: E0930 19:50:42.050602 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-j57gg" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.063622 4756 scope.go:117] "RemoveContainer" containerID="b71dfa87c8da4823c8486c18ec442810523e91946df9693d7b46568418c78a27" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.270549 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.271171 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.309012 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.350914 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.358285 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" event={"ID":"b6f43c97-24c1-4b5c-9144-436afa885815","Type":"ContainerDied","Data":"6fb5c383a933fced4ef19048e7a5d190b2a4e559f8e441d6c95e69ba70dba4fa"} Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.358325 4756 scope.go:117] "RemoveContainer" containerID="54ba83b5ec9e3df45a74815ad28e4405e4895da7ecbe0a4cdfcafe155890ed6a" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.358453 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-fzrxq" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.393841 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c736dd47-8e28-4fbb-8036-d206f7cc5471","Type":"ContainerDied","Data":"a0457f5eeef64a14111de38237283dd527a65d774216cb0bdb7cf2e958ac5207"} Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.396011 4756 util.go:48] "No ready sandbox for pod can be found. 
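The entries above trace the kubelet's image-pull failure path for cinder-db-sync: the CRI PullImage call is cancelled, kuberuntime_manager dumps the full container spec as an unhandled error, and pod_workers fails the sync with ErrImagePull; the 19:50:42.404788 entry below shows the same pod moving to ImagePullBackOff. A minimal sketch for tallying these failures per pod from a log like this one — the file name kubelet.log and the counting logic are illustrative assumptions, not part of the log:

    import re
    from collections import Counter

    # Marker strings copied from the kubenswrapper entries above;
    # "kubelet.log" is an assumed file name for illustration.
    POD_RE = re.compile(r'pod="([^"]+)"')
    MARKERS = ("PullImage from image service failed", "ErrImagePull", "ImagePullBackOff")

    counts = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as log:
        for line in log:
            for marker in MARKERS:
                if marker in line:
                    m = POD_RE.search(line)
                    # Fall back to a placeholder when an entry carries no pod= field.
                    counts[(m.group(1) if m else "<no pod>", marker)] += 1

    for (pod, marker), n in sorted(counts.items()):
        print(f"{pod}: {marker} x{n}")

Keying the counter on the (pod, marker) pair keeps the initial pull failure distinct from the subsequent back-off retries, which is the distinction that matters when deciding whether a pull is transient or stuck.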
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.404672 4756 scope.go:117] "RemoveContainer" containerID="cee228160ab1db6a58b9bd5fac0c626043c07faf2a1022d2195a361362ca7bfc" Sep 30 19:50:42 crc kubenswrapper[4756]: E0930 19:50:42.404788 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-j57gg" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.425946 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.425999 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.426023 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.426155 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.426176 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwvh\" (UniqueName: \"kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.426204 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc\") pod \"b6f43c97-24c1-4b5c-9144-436afa885815\" (UID: \"b6f43c97-24c1-4b5c-9144-436afa885815\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.482584 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh" (OuterVolumeSpecName: "kube-api-access-kfwvh") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "kube-api-access-kfwvh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.507863 4756 scope.go:117] "RemoveContainer" containerID="6a13740313829bf263916b0f39a53603833681d23155291a3e296376914e598f" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528513 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528625 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528673 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27mxj\" (UniqueName: \"kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528696 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528782 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528828 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.528939 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data\") pod \"c736dd47-8e28-4fbb-8036-d206f7cc5471\" (UID: \"c736dd47-8e28-4fbb-8036-d206f7cc5471\") " Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.529998 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwvh\" (UniqueName: \"kubernetes.io/projected/b6f43c97-24c1-4b5c-9144-436afa885815-kube-api-access-kfwvh\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.560285 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.574185 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.581128 4756 scope.go:117] "RemoveContainer" containerID="726428566e5874d33d6da463d98bf4cf9458d6a88a6a7ab44adbf99d1e3c4e25" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.582786 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.585373 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts" (OuterVolumeSpecName: "scripts") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.597995 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj" (OuterVolumeSpecName: "kube-api-access-27mxj") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "kube-api-access-27mxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.608195 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.628522 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633745 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633769 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633778 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633788 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27mxj\" (UniqueName: \"kubernetes.io/projected/c736dd47-8e28-4fbb-8036-d206f7cc5471-kube-api-access-27mxj\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633798 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633807 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c736dd47-8e28-4fbb-8036-d206f7cc5471-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.633815 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.640829 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config" (OuterVolumeSpecName: "config") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.648749 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.656649 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b6f43c97-24c1-4b5c-9144-436afa885815" (UID: "b6f43c97-24c1-4b5c-9144-436afa885815"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.659532 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.691077 4756 scope.go:117] "RemoveContainer" containerID="e8e1a8f4dd068c498f7eaf221ec9920ff488b7ff0b44001b0099dd0d4df7dac0" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.719875 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.724875 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-fzrxq"] Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.734591 4756 scope.go:117] "RemoveContainer" containerID="a70d9d815ab11d673e34ffbbbeab50cd29f0059de5b88903ab04b55ae2d01684" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.736520 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.736550 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.736560 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b6f43c97-24c1-4b5c-9144-436afa885815-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.741971 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data" (OuterVolumeSpecName: "config-data") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.745116 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c736dd47-8e28-4fbb-8036-d206f7cc5471" (UID: "c736dd47-8e28-4fbb-8036-d206f7cc5471"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.838335 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:42 crc kubenswrapper[4756]: I0930 19:50:42.838368 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c736dd47-8e28-4fbb-8036-d206f7cc5471-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.111333 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.125022 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" path="/var/lib/kubelet/pods/b6f43c97-24c1-4b5c-9144-436afa885815/volumes" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.125606 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.142987 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143328 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="init" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143339 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="init" Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143358 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-central-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143363 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-central-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143380 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143386 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143411 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="proxy-httpd" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143417 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="proxy-httpd" Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143434 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="sg-core" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143439 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="sg-core" Sep 30 19:50:43 crc kubenswrapper[4756]: E0930 19:50:43.143449 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-notification-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143455 4756 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-notification-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143634 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-notification-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143655 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="ceilometer-central-agent" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143667 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="sg-core" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143679 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" containerName="proxy-httpd" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.143691 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f43c97-24c1-4b5c-9144-436afa885815" containerName="dnsmasq-dns" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.145110 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.147186 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.147960 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.149748 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244305 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244382 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244434 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fftl\" (UniqueName: \"kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244487 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244636 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data\") 
pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244762 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.244875 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347246 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347571 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347597 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347641 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347671 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347712 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.347737 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fftl\" (UniqueName: \"kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.348284 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd\") pod 
\"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.348323 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.351007 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.351569 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.352033 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.352088 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.364922 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fftl\" (UniqueName: \"kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl\") pod \"ceilometer-0\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") " pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.410036 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerStarted","Data":"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"} Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.410109 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerStarted","Data":"fbab5dc611d5e41a7e3d393f3c2db7f7b81f391215770f09f21b591f6a092580"} Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.413065 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerStarted","Data":"ccd5bd63d9943a688550e01265425f0136d6c0e5460a307fca4d7581333bfff3"} Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.423784 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8hkvg" event={"ID":"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2","Type":"ContainerStarted","Data":"188884cc097d3da46aaa74ebafbd550125a5f4b8a7035fcec7d11bd3deca4c31"} Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.443676 4756 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican-db-sync-8hkvg" podStartSLOduration=7.082357342 podStartE2EDuration="24.443657934s" podCreationTimestamp="2025-09-30 19:50:19 +0000 UTC" firstStartedPulling="2025-09-30 19:50:24.725954416 +0000 UTC m=+1154.346887893" lastFinishedPulling="2025-09-30 19:50:42.087254968 +0000 UTC m=+1171.708188485" observedRunningTime="2025-09-30 19:50:43.440598764 +0000 UTC m=+1173.061532241" watchObservedRunningTime="2025-09-30 19:50:43.443657934 +0000 UTC m=+1173.064591411" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.502034 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:43 crc kubenswrapper[4756]: I0930 19:50:43.920797 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:43 crc kubenswrapper[4756]: W0930 19:50:43.923237 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16fbb022_5ffb_41ad_a18b_c9eb1ad86897.slice/crio-d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f WatchSource:0}: Error finding container d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f: Status 404 returned error can't find the container with id d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f Sep 30 19:50:44 crc kubenswrapper[4756]: I0930 19:50:44.464884 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerStarted","Data":"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"} Sep 30 19:50:44 crc kubenswrapper[4756]: I0930 19:50:44.471727 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerStarted","Data":"ea63f30719f7ef7c365ee9dedbcfdbef0aa5898605f064674fae1e0af74bf574"} Sep 30 19:50:44 crc kubenswrapper[4756]: I0930 19:50:44.473511 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerStarted","Data":"d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f"} Sep 30 19:50:44 crc kubenswrapper[4756]: I0930 19:50:44.499031 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=12.499013591 podStartE2EDuration="12.499013591s" podCreationTimestamp="2025-09-30 19:50:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:44.497639745 +0000 UTC m=+1174.118573302" watchObservedRunningTime="2025-09-30 19:50:44.499013591 +0000 UTC m=+1174.119947068" Sep 30 19:50:44 crc kubenswrapper[4756]: I0930 19:50:44.529311 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=18.529296003 podStartE2EDuration="18.529296003s" podCreationTimestamp="2025-09-30 19:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:44.527325121 +0000 UTC m=+1174.148258608" watchObservedRunningTime="2025-09-30 19:50:44.529296003 +0000 UTC m=+1174.150229480" Sep 30 19:50:45 crc kubenswrapper[4756]: I0930 19:50:45.131456 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="c736dd47-8e28-4fbb-8036-d206f7cc5471" path="/var/lib/kubelet/pods/c736dd47-8e28-4fbb-8036-d206f7cc5471/volumes" Sep 30 19:50:45 crc kubenswrapper[4756]: I0930 19:50:45.485450 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerStarted","Data":"863f612cd545ac0fb2069bdbe890b8c349b3ace0bee3682e85b0690dbbe53426"} Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.505859 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerStarted","Data":"1d7ee6a247afc0df8d24969be2e9021b5b7950050932c2f4b2dc96b9234b945e"} Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.508867 4756 generic.go:334] "Generic (PLEG): container finished" podID="fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" containerID="188884cc097d3da46aaa74ebafbd550125a5f4b8a7035fcec7d11bd3deca4c31" exitCode=0 Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.508914 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8hkvg" event={"ID":"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2","Type":"ContainerDied","Data":"188884cc097d3da46aaa74ebafbd550125a5f4b8a7035fcec7d11bd3deca4c31"} Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.536570 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.536627 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.572831 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.611740 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 19:50:46 crc kubenswrapper[4756]: I0930 19:50:46.903221 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.522029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerStarted","Data":"5958179af95498993e03abf8ecb860e02ff9989d57c79429653db9897c8d3b02"} Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.525273 4756 generic.go:334] "Generic (PLEG): container finished" podID="2d1a09a3-9f2d-41ec-a2de-009e4835154e" containerID="88bc27040e35d9df953d55c08c8b378a2f16d65a8a8f0afd059a1be9ec84599d" exitCode=0 Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.525827 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pljns" event={"ID":"2d1a09a3-9f2d-41ec-a2de-009e4835154e","Type":"ContainerDied","Data":"88bc27040e35d9df953d55c08c8b378a2f16d65a8a8f0afd059a1be9ec84599d"} Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.526235 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.526516 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.873034 4756 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/openstackclient"] Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.874308 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.876636 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.876636 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.876794 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-lfv9k" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.880780 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.882583 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.934488 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle\") pod \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.934710 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v9dx\" (UniqueName: \"kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx\") pod \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.934830 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data\") pod \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\" (UID: \"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2\") " Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.935449 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.935527 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6rlh\" (UniqueName: \"kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.935609 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.935684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.947576 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx" (OuterVolumeSpecName: "kube-api-access-9v9dx") pod "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" (UID: "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2"). InnerVolumeSpecName "kube-api-access-9v9dx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.951935 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" (UID: "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:47 crc kubenswrapper[4756]: I0930 19:50:47.963606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" (UID: "fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.036927 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6rlh\" (UniqueName: \"kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037008 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037035 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037099 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037162 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v9dx\" (UniqueName: \"kubernetes.io/projected/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-kube-api-access-9v9dx\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037175 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.037183 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.038520 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.040283 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.041794 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.054804 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6rlh\" (UniqueName: \"kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh\") pod \"openstackclient\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.089468 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.090793 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.094305 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.184650 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:48 crc kubenswrapper[4756]: E0930 19:50:48.185004 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" containerName="barbican-db-sync" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.185015 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" containerName="barbican-db-sync" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.185208 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" containerName="barbican-db-sync" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.186183 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.213896 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.239295 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.239549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8bkj\" (UniqueName: \"kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.239593 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.239644 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: E0930 19:50:48.272291 4756 log.go:32] "RunPodSandbox from runtime service failed" err=< Sep 30 19:50:48 crc kubenswrapper[4756]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e11b18b8-af91-46d0-b1e3-086a57a0731a_0(91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f" Netns:"/var/run/netns/af1dcc09-85f1-422d-a8c3-d2d3a61f8e5d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f;K8S_POD_UID=e11b18b8-af91-46d0-b1e3-086a57a0731a" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e11b18b8-af91-46d0-b1e3-086a57a0731a]: expected pod UID "e11b18b8-af91-46d0-b1e3-086a57a0731a" but got "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" from Kube API Sep 30 19:50:48 crc kubenswrapper[4756]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 30 19:50:48 crc kubenswrapper[4756]: > Sep 30 19:50:48 crc kubenswrapper[4756]: E0930 19:50:48.272348 4756 kuberuntime_sandbox.go:72] 
"Failed to create sandbox for pod" err=< Sep 30 19:50:48 crc kubenswrapper[4756]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e11b18b8-af91-46d0-b1e3-086a57a0731a_0(91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f" Netns:"/var/run/netns/af1dcc09-85f1-422d-a8c3-d2d3a61f8e5d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=91f2622c17b7d76b3f2a5003d53da034e6f9def523c180e36dc555b74fb7239f;K8S_POD_UID=e11b18b8-af91-46d0-b1e3-086a57a0731a" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e11b18b8-af91-46d0-b1e3-086a57a0731a]: expected pod UID "e11b18b8-af91-46d0-b1e3-086a57a0731a" but got "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" from Kube API Sep 30 19:50:48 crc kubenswrapper[4756]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 30 19:50:48 crc kubenswrapper[4756]: > pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.340921 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.341061 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8bkj\" (UniqueName: \"kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.341086 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.341128 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.344066 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.347260 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.349078 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.361935 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8bkj\" (UniqueName: \"kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj\") pod \"openstackclient\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.542630 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerStarted","Data":"6ddd09ff48db0699e5502527f64bdf95fb3651a12c72edb43b7654c5869384ba"} Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.544348 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.544509 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.558664 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8hkvg" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.562553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8hkvg" event={"ID":"fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2","Type":"ContainerDied","Data":"f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466"} Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.562614 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1791a8c2c7a0241b12a98bd9995b061ccefb5f9dea263885f0d95d836408466" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.562791 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.589765 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.452347612 podStartE2EDuration="5.589746133s" podCreationTimestamp="2025-09-30 19:50:43 +0000 UTC" firstStartedPulling="2025-09-30 19:50:43.924833632 +0000 UTC m=+1173.545767109" lastFinishedPulling="2025-09-30 19:50:48.062232153 +0000 UTC m=+1177.683165630" observedRunningTime="2025-09-30 19:50:48.580653815 +0000 UTC m=+1178.201587292" watchObservedRunningTime="2025-09-30 19:50:48.589746133 +0000 UTC m=+1178.210679610" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.592247 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e11b18b8-af91-46d0-b1e3-086a57a0731a" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.609354 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.645911 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config\") pod \"e11b18b8-af91-46d0-b1e3-086a57a0731a\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.646096 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle\") pod \"e11b18b8-af91-46d0-b1e3-086a57a0731a\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.646127 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6rlh\" (UniqueName: \"kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh\") pod \"e11b18b8-af91-46d0-b1e3-086a57a0731a\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.646192 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret\") pod \"e11b18b8-af91-46d0-b1e3-086a57a0731a\" (UID: \"e11b18b8-af91-46d0-b1e3-086a57a0731a\") " Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.647165 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e11b18b8-af91-46d0-b1e3-086a57a0731a" (UID: "e11b18b8-af91-46d0-b1e3-086a57a0731a"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.653521 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e11b18b8-af91-46d0-b1e3-086a57a0731a" (UID: "e11b18b8-af91-46d0-b1e3-086a57a0731a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.675851 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh" (OuterVolumeSpecName: "kube-api-access-n6rlh") pod "e11b18b8-af91-46d0-b1e3-086a57a0731a" (UID: "e11b18b8-af91-46d0-b1e3-086a57a0731a"). InnerVolumeSpecName "kube-api-access-n6rlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.680763 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e11b18b8-af91-46d0-b1e3-086a57a0731a" (UID: "e11b18b8-af91-46d0-b1e3-086a57a0731a"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.751505 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.751847 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.751857 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6rlh\" (UniqueName: \"kubernetes.io/projected/e11b18b8-af91-46d0-b1e3-086a57a0731a-kube-api-access-n6rlh\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.751872 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e11b18b8-af91-46d0-b1e3-086a57a0731a-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.781237 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.782540 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.797675 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.797929 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mv7v6" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.798160 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.824454 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.855624 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.855730 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.855840 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.855864 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.855882 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnmx8\" (UniqueName: \"kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.865525 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.867023 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.871863 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.924973 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.941025 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.942173 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.958413 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960136 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960253 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960364 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960460 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk7p5\" (UniqueName: 
\"kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960587 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tcgf\" (UniqueName: \"kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960678 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960770 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.960841 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.962232 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.962353 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.962438 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnmx8\" (UniqueName: \"kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.962528 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc 
kubenswrapper[4756]: I0930 19:50:48.962611 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.962672 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.966861 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.966934 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.967299 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.972252 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.977100 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.985718 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:48 crc kubenswrapper[4756]: I0930 19:50:48.989618 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnmx8\" (UniqueName: \"kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8\") pod \"barbican-worker-bbf58c-78vxt\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.019783 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.021783 4756 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.024946 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.048343 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.069206 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.069248 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.069278 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk7p5\" (UniqueName: \"kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.069303 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.069328 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tcgf\" (UniqueName: \"kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070295 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070327 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070350 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0\") pod 
\"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070368 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070363 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070386 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070439 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070484 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070526 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070543 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070567 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.070594 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvshm\" (UniqueName: \"kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.071020 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.071428 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.071548 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.071988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.075520 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.078336 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.079521 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.080043 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 
19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.080243 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.095798 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk7p5\" (UniqueName: \"kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5\") pod \"dnsmasq-dns-59d5ff467f-l8f6v\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.099478 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tcgf\" (UniqueName: \"kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf\") pod \"barbican-keystone-listener-7b959497f4-7qz6r\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.129088 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.171741 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e11b18b8-af91-46d0-b1e3-086a57a0731a" path="/var/lib/kubelet/pods/e11b18b8-af91-46d0-b1e3-086a57a0731a/volumes" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.172235 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle\") pod \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.172753 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcf85\" (UniqueName: \"kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85\") pod \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.172790 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config\") pod \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\" (UID: \"2d1a09a3-9f2d-41ec-a2de-009e4835154e\") " Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.173540 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvshm\" (UniqueName: \"kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.173691 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.173825 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data\") pod \"barbican-api-8564f76dcb-9qwnq\" 
(UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.173922 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.173987 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.174815 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.179335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.180293 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.183247 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85" (OuterVolumeSpecName: "kube-api-access-mcf85") pod "2d1a09a3-9f2d-41ec-a2de-009e4835154e" (UID: "2d1a09a3-9f2d-41ec-a2de-009e4835154e"). InnerVolumeSpecName "kube-api-access-mcf85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.183653 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.189378 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvshm\" (UniqueName: \"kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm\") pod \"barbican-api-8564f76dcb-9qwnq\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") " pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.218825 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.230688 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config" (OuterVolumeSpecName: "config") pod "2d1a09a3-9f2d-41ec-a2de-009e4835154e" (UID: "2d1a09a3-9f2d-41ec-a2de-009e4835154e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.238648 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.240817 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d1a09a3-9f2d-41ec-a2de-009e4835154e" (UID: "2d1a09a3-9f2d-41ec-a2de-009e4835154e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.277533 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.277559 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcf85\" (UniqueName: \"kubernetes.io/projected/2d1a09a3-9f2d-41ec-a2de-009e4835154e-kube-api-access-mcf85\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.277570 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d1a09a3-9f2d-41ec-a2de-009e4835154e-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.440680 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.446845 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:49 crc kubenswrapper[4756]: W0930 19:50:49.452849 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee4f7c8d_6f5a_4ab8_9896_71bfbd28bb0c.slice/crio-42fb38ef49b833a3a35055a9d887ca8f8361ddefdc8394f6e656049da01e41f8 WatchSource:0}: Error finding container 42fb38ef49b833a3a35055a9d887ca8f8361ddefdc8394f6e656049da01e41f8: Status 404 returned error can't find the container with id 42fb38ef49b833a3a35055a9d887ca8f8361ddefdc8394f6e656049da01e41f8 Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.520898 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.674496 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.735818 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-pljns" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.735989 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pljns" event={"ID":"2d1a09a3-9f2d-41ec-a2de-009e4835154e","Type":"ContainerDied","Data":"7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a"} Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.736030 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f33e058ec4363989668a3a68891e078e0d96660f2395e48f31c1ccbbe686a2a" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.743798 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6","Type":"ContainerStarted","Data":"79a6561c2a180cfffea0b48d024967660299b24f856dc23790651705da56bb27"} Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.745814 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" event={"ID":"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c","Type":"ContainerStarted","Data":"42fb38ef49b833a3a35055a9d887ca8f8361ddefdc8394f6e656049da01e41f8"} Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.745869 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.752232 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.764160 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:50:49 crc kubenswrapper[4756]: E0930 19:50:49.764671 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d1a09a3-9f2d-41ec-a2de-009e4835154e" containerName="neutron-db-sync" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.764692 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d1a09a3-9f2d-41ec-a2de-009e4835154e" containerName="neutron-db-sync" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.764914 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d1a09a3-9f2d-41ec-a2de-009e4835154e" containerName="neutron-db-sync" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.765835 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.794909 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e11b18b8-af91-46d0-b1e3-086a57a0731a" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.806448 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.840077 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.864471 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"] Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.865960 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.867912 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2jxvh"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.868125 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.868240 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.868487 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.872521 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"]
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.905269 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s66ps\" (UniqueName: \"kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.905344 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.905375 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.907087 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.907125 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:49 crc kubenswrapper[4756]: I0930 19:50:49.907184 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013049 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013111 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013129 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js5tc\" (UniqueName: \"kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013150 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013169 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s66ps\" (UniqueName: \"kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013202 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013223 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013247 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013283 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013312 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.013336 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.014292 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.014899 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.015036 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.015445 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.015609 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.069852 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s66ps\" (UniqueName: \"kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps\") pod \"dnsmasq-dns-75c8ddd69c-kjrvj\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.073499 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"]
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.117724 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.117812 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n"
volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.117866 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.117883 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js5tc\" (UniqueName: \"kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.117908 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.122994 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.123661 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.123927 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.133672 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.144999 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js5tc\" (UniqueName: \"kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc\") pod \"neutron-5647cd86c6-mmv8n\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.235258 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.280327 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.757413 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerStarted","Data":"fb78833292dfd2206bd7a9a24184bb9c1a0ea31bd2ada4020993f68b93f2d03f"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.763962 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerStarted","Data":"5a135196ad107e441834a5114f959ba7f203574a4bd58c03d661dcc56a67f77f"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.765625 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerStarted","Data":"0da4b3b06e25cc58e04e2a2d12d8d0b9964704dda90da088c73458a4f101cac4"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.765651 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerStarted","Data":"a9fb663db742268ff6709a83fa05391902d2464b18f50b8142ccb0124eab0605"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.765660 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerStarted","Data":"88ae8364e70387b3c5fea443861cee65b377662d9a658c2787daf9dadec0cb43"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.766464 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.766638 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.814053 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-8564f76dcb-9qwnq" podStartSLOduration=2.814039825 podStartE2EDuration="2.814039825s" podCreationTimestamp="2025-09-30 19:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:50.81308451 +0000 UTC m=+1180.434017987" watchObservedRunningTime="2025-09-30 19:50:50.814039825 +0000 UTC m=+1180.434973302" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.830626 4756 generic.go:334] "Generic (PLEG): container finished" podID="ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" containerID="70973824c3dff8dd458ff730c01bcd5fc1cd58b782baf2f077be751653932f5d" exitCode=0 Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.830699 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.831386 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" event={"ID":"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c","Type":"ContainerDied","Data":"70973824c3dff8dd458ff730c01bcd5fc1cd58b782baf2f077be751653932f5d"} Sep 30 19:50:50 crc kubenswrapper[4756]: I0930 19:50:50.928139 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.031593 4756 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"] Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.326410 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.453998 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.454067 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.454109 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.454137 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.454227 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk7p5\" (UniqueName: \"kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.454260 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc\") pod \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\" (UID: \"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c\") " Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.458912 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5" (OuterVolumeSpecName: "kube-api-access-xk7p5") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "kube-api-access-xk7p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.475472 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.481663 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.483171 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config" (OuterVolumeSpecName: "config") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.487800 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.497567 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" (UID: "ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555888 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555912 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555922 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555931 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555939 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk7p5\" (UniqueName: \"kubernetes.io/projected/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-kube-api-access-xk7p5\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.555949 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.633741 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-external-api-0" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.864195 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" event={"ID":"ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c","Type":"ContainerDied","Data":"42fb38ef49b833a3a35055a9d887ca8f8361ddefdc8394f6e656049da01e41f8"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.864529 4756 scope.go:117] "RemoveContainer" containerID="70973824c3dff8dd458ff730c01bcd5fc1cd58b782baf2f077be751653932f5d" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.864475 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-l8f6v" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.868826 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerStarted","Data":"192228cbe9f8707045ae7438487eea8771f1c6ef226112ba0726991cabd15ecf"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.868869 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerStarted","Data":"a0d0a2793ffe10ff819905e8fa8f947fb5983c9223ed4b9f710124623474411e"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.868880 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerStarted","Data":"4e6b792543fa081046422ae031779435b360f00ddf74874cb3eca0045122a212"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.869272 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.881096 4756 generic.go:334] "Generic (PLEG): container finished" podID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerID="01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf" exitCode=0 Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.881883 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" event={"ID":"46cc1f38-e166-40c0-9d00-7a074fa80234","Type":"ContainerDied","Data":"01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.881978 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" event={"ID":"46cc1f38-e166-40c0-9d00-7a074fa80234","Type":"ContainerStarted","Data":"0578c207917db85c2b4829ed762e18c9066790f1fe9898f12f296cb00e985cf8"} Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.893148 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5647cd86c6-mmv8n" podStartSLOduration=2.893130123 podStartE2EDuration="2.893130123s" podCreationTimestamp="2025-09-30 19:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:51.889111698 +0000 UTC m=+1181.510045175" watchObservedRunningTime="2025-09-30 19:50:51.893130123 +0000 UTC m=+1181.514063600" Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.962746 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:51 crc kubenswrapper[4756]: I0930 19:50:51.975331 4756 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-l8f6v"] Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.572926 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:50:52 crc kubenswrapper[4756]: E0930 19:50:52.573792 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" containerName="init" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.573810 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" containerName="init" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.573998 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" containerName="init" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.575016 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.578523 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.578642 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.595847 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.662671 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.662718 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679598 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679665 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679719 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679753 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679778 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679813 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmhvz\" (UniqueName: \"kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.679832 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.721330 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.735665 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783358 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783432 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783494 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmhvz\" (UniqueName: \"kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783513 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783550 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783599 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.783663 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.797432 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.799281 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.801124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.805660 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.807158 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.824472 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmhvz\" (UniqueName: \"kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.826237 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs\") pod \"neutron-d4df5d75-8jctn\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.893713 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:52 crc kubenswrapper[4756]: I0930 19:50:52.894061 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:53 crc kubenswrapper[4756]: I0930 
19:50:53.067363 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:53 crc kubenswrapper[4756]: I0930 19:50:53.129172 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c" path="/var/lib/kubelet/pods/ee4f7c8d-6f5a-4ab8-9896-71bfbd28bb0c/volumes" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.249716 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:50:54 crc kubenswrapper[4756]: W0930 19:50:54.275993 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dead4ff_0706_4a5e_b6ae_c388ae1c2313.slice/crio-25c76c73cfb9fb2968a5124f862e69e4d34b8f3323caca226d9c67b8fa92fac5 WatchSource:0}: Error finding container 25c76c73cfb9fb2968a5124f862e69e4d34b8f3323caca226d9c67b8fa92fac5: Status 404 returned error can't find the container with id 25c76c73cfb9fb2968a5124f862e69e4d34b8f3323caca226d9c67b8fa92fac5 Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.345983 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"] Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.348976 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.355041 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.359141 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.360740 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.385868 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"] Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519795 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519841 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519866 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519916 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519935 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phfbs\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519955 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.519989 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.520023 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.622339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.622863 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phfbs\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623426 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623584 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623705 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623823 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623911 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.623991 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.625815 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.626483 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.631679 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.631864 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.632614 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.632913 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: 
\"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.633746 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.651540 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phfbs\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs\") pod \"swift-proxy-5f8d8945f5-z5bmn\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.704235 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.943933 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerStarted","Data":"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.944361 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerStarted","Data":"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.955639 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerStarted","Data":"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.955680 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerStarted","Data":"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.968583 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerStarted","Data":"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.968634 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerStarted","Data":"25c76c73cfb9fb2968a5124f862e69e4d34b8f3323caca226d9c67b8fa92fac5"} Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.975182 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" podStartSLOduration=3.109818717 podStartE2EDuration="6.975155626s" podCreationTimestamp="2025-09-30 19:50:48 +0000 UTC" firstStartedPulling="2025-09-30 19:50:49.731142029 +0000 UTC m=+1179.352075496" lastFinishedPulling="2025-09-30 19:50:53.596478928 +0000 UTC m=+1183.217412405" observedRunningTime="2025-09-30 19:50:54.960252127 +0000 UTC m=+1184.581185604" watchObservedRunningTime="2025-09-30 
Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.981578 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" event={"ID":"46cc1f38-e166-40c0-9d00-7a074fa80234","Type":"ContainerStarted","Data":"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63"}
Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.982412 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj"
Sep 30 19:50:54 crc kubenswrapper[4756]: I0930 19:50:54.991943 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-bbf58c-78vxt" podStartSLOduration=3.240268148 podStartE2EDuration="6.991922365s" podCreationTimestamp="2025-09-30 19:50:48 +0000 UTC" firstStartedPulling="2025-09-30 19:50:49.845668433 +0000 UTC m=+1179.466601910" lastFinishedPulling="2025-09-30 19:50:53.59732265 +0000 UTC m=+1183.218256127" observedRunningTime="2025-09-30 19:50:54.984564292 +0000 UTC m=+1184.605497759" watchObservedRunningTime="2025-09-30 19:50:54.991922365 +0000 UTC m=+1184.612855842"
Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.011497 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" podStartSLOduration=6.011484116 podStartE2EDuration="6.011484116s" podCreationTimestamp="2025-09-30 19:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:55.009481704 +0000 UTC m=+1184.630415201" watchObservedRunningTime="2025-09-30 19:50:55.011484116 +0000 UTC m=+1184.632417593"
Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.322064 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"]
Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.515384 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"]
Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.519761 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.524697 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.524711 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.533926 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"] Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649055 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649127 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649160 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649179 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl47p\" (UniqueName: \"kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649363 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649503 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.649581 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.715928 4756 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.716016 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.727179 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751313 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751367 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl47p\" (UniqueName: \"kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751455 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751492 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751545 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751605 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.751638 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.752940 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 
19:50:55.755938 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.755982 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.757164 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.757876 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.759293 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.773750 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl47p\" (UniqueName: \"kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p\") pod \"barbican-api-684ccb4bd6-x97br\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:55 crc kubenswrapper[4756]: I0930 19:50:55.847110 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.008193 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerStarted","Data":"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4"} Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.008456 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.016654 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerStarted","Data":"8d013d9f6a59da2bcacdb9355cb603a5a104cc57783d02a5fc8770f2294bb8f0"} Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.016691 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerStarted","Data":"2ab71e28b3fab8dbf1f220c5eb4490f4cf686ee22cd811d509ce95839b4580b6"} Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.016704 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerStarted","Data":"b4bc6478bbf289ef6ad9dcd6e7f00f20e768178dd143775b77f42f3b2af3bfcd"} Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.038002 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d4df5d75-8jctn" podStartSLOduration=4.037979739 podStartE2EDuration="4.037979739s" podCreationTimestamp="2025-09-30 19:50:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:56.028415409 +0000 UTC m=+1185.649348886" watchObservedRunningTime="2025-09-30 19:50:56.037979739 +0000 UTC m=+1185.658913216" Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.075068 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" podStartSLOduration=2.075048338 podStartE2EDuration="2.075048338s" podCreationTimestamp="2025-09-30 19:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:56.055737463 +0000 UTC m=+1185.676670940" watchObservedRunningTime="2025-09-30 19:50:56.075048338 +0000 UTC m=+1185.695981815" Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.241853 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.242333 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-central-agent" containerID="cri-o://863f612cd545ac0fb2069bdbe890b8c349b3ace0bee3682e85b0690dbbe53426" gracePeriod=30 Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.242773 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="proxy-httpd" containerID="cri-o://6ddd09ff48db0699e5502527f64bdf95fb3651a12c72edb43b7654c5869384ba" gracePeriod=30 Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.242820 4756 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="sg-core" containerID="cri-o://5958179af95498993e03abf8ecb860e02ff9989d57c79429653db9897c8d3b02" gracePeriod=30 Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.242848 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-notification-agent" containerID="cri-o://1d7ee6a247afc0df8d24969be2e9021b5b7950050932c2f4b2dc96b9234b945e" gracePeriod=30 Sep 30 19:50:56 crc kubenswrapper[4756]: I0930 19:50:56.479382 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"] Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.035959 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerStarted","Data":"207904952f6540ef119af443514c1298c2cf8341c4b2adc7b85d185c51cdf006"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.036287 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerStarted","Data":"b64e18bd6132f9cc117785155229a1d2aa54ec5983a3ee45e5e205868af69f65"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.036301 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerStarted","Data":"d1ba4efee963eace33036fd3dbc002cb7169e4269312a53dc8ccb5b7e91e4838"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.036348 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.036369 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.041997 4756 generic.go:334] "Generic (PLEG): container finished" podID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerID="6ddd09ff48db0699e5502527f64bdf95fb3651a12c72edb43b7654c5869384ba" exitCode=0 Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042042 4756 generic.go:334] "Generic (PLEG): container finished" podID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerID="5958179af95498993e03abf8ecb860e02ff9989d57c79429653db9897c8d3b02" exitCode=2 Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042056 4756 generic.go:334] "Generic (PLEG): container finished" podID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerID="863f612cd545ac0fb2069bdbe890b8c349b3ace0bee3682e85b0690dbbe53426" exitCode=0 Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042079 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerDied","Data":"6ddd09ff48db0699e5502527f64bdf95fb3651a12c72edb43b7654c5869384ba"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042136 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerDied","Data":"5958179af95498993e03abf8ecb860e02ff9989d57c79429653db9897c8d3b02"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042151 4756 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerDied","Data":"863f612cd545ac0fb2069bdbe890b8c349b3ace0bee3682e85b0690dbbe53426"} Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.042971 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.043008 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:50:57 crc kubenswrapper[4756]: I0930 19:50:57.058950 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-684ccb4bd6-x97br" podStartSLOduration=2.058935007 podStartE2EDuration="2.058935007s" podCreationTimestamp="2025-09-30 19:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:57.054427859 +0000 UTC m=+1186.675361336" watchObservedRunningTime="2025-09-30 19:50:57.058935007 +0000 UTC m=+1186.679868484" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.052107 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j57gg" event={"ID":"0f0f53de-f545-431e-9901-e45eedc73bf9","Type":"ContainerStarted","Data":"138f52c6994ac2758b4cb61b9a1de7c4625cdf55dd17f5766e35643527e4dcaa"} Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.065959 4756 generic.go:334] "Generic (PLEG): container finished" podID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerID="1d7ee6a247afc0df8d24969be2e9021b5b7950050932c2f4b2dc96b9234b945e" exitCode=0 Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.066241 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerDied","Data":"1d7ee6a247afc0df8d24969be2e9021b5b7950050932c2f4b2dc96b9234b945e"} Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.066297 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16fbb022-5ffb-41ad-a18b-c9eb1ad86897","Type":"ContainerDied","Data":"d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f"} Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.066309 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1a0fb8f5168f822aeb8d6ef85a84aba6911b96775c3e94cc80304ad3e77dd3f" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.075317 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-j57gg" podStartSLOduration=6.781745625 podStartE2EDuration="39.075295804s" podCreationTimestamp="2025-09-30 19:50:19 +0000 UTC" firstStartedPulling="2025-09-30 19:50:24.439208221 +0000 UTC m=+1154.060141698" lastFinishedPulling="2025-09-30 19:50:56.7327584 +0000 UTC m=+1186.353691877" observedRunningTime="2025-09-30 19:50:58.070468037 +0000 UTC m=+1187.691401524" watchObservedRunningTime="2025-09-30 19:50:58.075295804 +0000 UTC m=+1187.696229281" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.092955 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219238 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219300 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219405 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219468 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219507 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219607 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fftl\" (UniqueName: \"kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.219652 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts\") pod \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\" (UID: \"16fbb022-5ffb-41ad-a18b-c9eb1ad86897\") "
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.221945 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.227413 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.231542 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts" (OuterVolumeSpecName: "scripts") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.246824 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl" (OuterVolumeSpecName: "kube-api-access-2fftl") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "kube-api-access-2fftl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.274496 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.321527 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.321574 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fftl\" (UniqueName: \"kubernetes.io/projected/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-kube-api-access-2fftl\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.321585 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.321593 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.321602 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.338143 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data" (OuterVolumeSpecName: "config-data") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.357593 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16fbb022-5ffb-41ad-a18b-c9eb1ad86897" (UID: "16fbb022-5ffb-41ad-a18b-c9eb1ad86897"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.423132 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:58 crc kubenswrapper[4756]: I0930 19:50:58.423156 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fbb022-5ffb-41ad-a18b-c9eb1ad86897-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.078101 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.130251 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.135365 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.147440 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:59 crc kubenswrapper[4756]: E0930 19:50:59.147851 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="proxy-httpd" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.147870 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="proxy-httpd" Sep 30 19:50:59 crc kubenswrapper[4756]: E0930 19:50:59.147883 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-notification-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.147890 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-notification-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: E0930 19:50:59.147914 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-central-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.147920 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-central-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: E0930 19:50:59.147939 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="sg-core" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.147945 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="sg-core" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.148119 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-central-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.148132 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="proxy-httpd" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.148148 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="sg-core" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.148165 4756 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" containerName="ceilometer-notification-agent" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.149838 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.154989 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.155731 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.181225 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.242009 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2rfk\" (UniqueName: \"kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.242471 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.242573 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.242695 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.243797 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.244212 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.244244 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345586 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345640 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345657 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345685 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2rfk\" (UniqueName: \"kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345702 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345736 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.345808 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.346740 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.346944 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.354891 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.354975 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.356682 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.361022 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.374681 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2rfk\" (UniqueName: \"kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk\") pod \"ceilometer-0\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.469935 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:50:59 crc kubenswrapper[4756]: I0930 19:50:59.987838 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:00 crc kubenswrapper[4756]: I0930 19:51:00.087410 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerStarted","Data":"c0efa8e44016b81bba0da5124e255628603ecb99eec44a52ef84277ee40638d9"} Sep 30 19:51:00 crc kubenswrapper[4756]: I0930 19:51:00.237513 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" Sep 30 19:51:00 crc kubenswrapper[4756]: I0930 19:51:00.316917 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:51:00 crc kubenswrapper[4756]: I0930 19:51:00.317209 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="dnsmasq-dns" containerID="cri-o://b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51" gracePeriod=10 Sep 30 19:51:00 crc kubenswrapper[4756]: I0930 19:51:00.979644 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5"
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.076493 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.076549 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.076591 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcbrr\" (UniqueName: \"kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.078074 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.078156 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.078376 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0\") pod \"62aa5559-f0c5-4603-95ec-795ff309390c\" (UID: \"62aa5559-f0c5-4603-95ec-795ff309390c\") "
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.108688 4756 generic.go:334] "Generic (PLEG): container finished" podID="62aa5559-f0c5-4603-95ec-795ff309390c" containerID="b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51" exitCode=0
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.108750 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" event={"ID":"62aa5559-f0c5-4603-95ec-795ff309390c","Type":"ContainerDied","Data":"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51"}
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.108796 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5" event={"ID":"62aa5559-f0c5-4603-95ec-795ff309390c","Type":"ContainerDied","Data":"a0f95a21abeea8cd6122320d3f13c4041f9f67af3e610f59996b6c3c9859e35d"}
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.108816 4756 scope.go:117] "RemoveContainer" containerID="b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51"
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.108970 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-tkqp5"
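The UnmountVolume records above show the teardown half of the kubelet's volume reconciliation: volumes still mounted for a deleted pod are unmounted, and only after TearDown succeeds are they reported as "Volume detached". A toy sketch of that desired-vs-actual pass follows; the types and function names are illustrative stand-ins, not the kubelet's real ones:

package main

import "fmt"

// volumeKey identifies a mounted volume by pod UID and volume name.
type volumeKey struct{ podUID, name string }

// reconcile unmounts every volume present in the actual state but absent
// from the desired state, mirroring the log sequence above.
func reconcile(desired, actual map[volumeKey]bool) {
	for v := range actual {
		if !desired[v] {
			fmt.Printf("UnmountVolume started for volume %q pod %q\n", v.name, v.podUID)
			// ... the volume plugin's TearDown would run here ...
			fmt.Printf("Volume detached for volume %q\n", v.name)
		}
	}
}

func main() {
	uid := "62aa5559-f0c5-4603-95ec-795ff309390c" // the dnsmasq pod from the log
	actual := map[volumeKey]bool{
		{uid, "config"}:  true,
		{uid, "dns-svc"}: true,
	}
	reconcile(map[volumeKey]bool{}, actual) // pod deleted: desired state is empty
}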
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.121629 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr" (OuterVolumeSpecName: "kube-api-access-jcbrr") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "kube-api-access-jcbrr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.143470 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16fbb022-5ffb-41ad-a18b-c9eb1ad86897" path="/var/lib/kubelet/pods/16fbb022-5ffb-41ad-a18b-c9eb1ad86897/volumes"
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.182325 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcbrr\" (UniqueName: \"kubernetes.io/projected/62aa5559-f0c5-4603-95ec-795ff309390c-kube-api-access-jcbrr\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.200145 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config" (OuterVolumeSpecName: "config") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.201665 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.225808 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.249960 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.271713 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "62aa5559-f0c5-4603-95ec-795ff309390c" (UID: "62aa5559-f0c5-4603-95ec-795ff309390c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.285684 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.285784 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.285796 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.285833 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.285842 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62aa5559-f0c5-4603-95ec-795ff309390c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.445452 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.475802 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-tkqp5"] Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.516169 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:51:01 crc kubenswrapper[4756]: I0930 19:51:01.672167 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8564f76dcb-9qwnq" Sep 30 19:51:02 crc kubenswrapper[4756]: I0930 19:51:02.118508 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerStarted","Data":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"} Sep 30 19:51:02 crc kubenswrapper[4756]: I0930 19:51:02.288495 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:03 crc kubenswrapper[4756]: I0930 19:51:03.128568 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" path="/var/lib/kubelet/pods/62aa5559-f0c5-4603-95ec-795ff309390c/volumes" Sep 30 19:51:03 crc kubenswrapper[4756]: I0930 19:51:03.909046 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:51:03 crc kubenswrapper[4756]: I0930 19:51:03.909608 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-log" containerID="cri-o://ccd5bd63d9943a688550e01265425f0136d6c0e5460a307fca4d7581333bfff3" gracePeriod=30 Sep 30 19:51:03 crc kubenswrapper[4756]: I0930 19:51:03.909728 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-httpd" 
containerID="cri-o://ea63f30719f7ef7c365ee9dedbcfdbef0aa5898605f064674fae1e0af74bf574" gracePeriod=30 Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.141503 4756 generic.go:334] "Generic (PLEG): container finished" podID="0f0f53de-f545-431e-9901-e45eedc73bf9" containerID="138f52c6994ac2758b4cb61b9a1de7c4625cdf55dd17f5766e35643527e4dcaa" exitCode=0 Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.141593 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j57gg" event={"ID":"0f0f53de-f545-431e-9901-e45eedc73bf9","Type":"ContainerDied","Data":"138f52c6994ac2758b4cb61b9a1de7c4625cdf55dd17f5766e35643527e4dcaa"} Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.145102 4756 generic.go:334] "Generic (PLEG): container finished" podID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerID="ccd5bd63d9943a688550e01265425f0136d6c0e5460a307fca4d7581333bfff3" exitCode=143 Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.145132 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerDied","Data":"ccd5bd63d9943a688550e01265425f0136d6c0e5460a307fca4d7581333bfff3"} Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.708599 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.711933 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.995277 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wrpgb"] Sep 30 19:51:04 crc kubenswrapper[4756]: E0930 19:51:04.996435 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="dnsmasq-dns" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.996458 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="dnsmasq-dns" Sep 30 19:51:04 crc kubenswrapper[4756]: E0930 19:51:04.996492 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="init" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.996502 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="init" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.996997 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="62aa5559-f0c5-4603-95ec-795ff309390c" containerName="dnsmasq-dns" Sep 30 19:51:04 crc kubenswrapper[4756]: I0930 19:51:04.997833 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.015457 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wrpgb"] Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.066830 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzlc8\" (UniqueName: \"kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8\") pod \"nova-api-db-create-wrpgb\" (UID: \"9a0dfe5d-82f1-43c6-ad07-91da00e313d1\") " pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.094054 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-zzp77"] Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.095284 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zzp77" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.114958 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-zzp77"] Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.170050 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vqfb\" (UniqueName: \"kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb\") pod \"nova-cell0-db-create-zzp77\" (UID: \"caf2d23d-add8-47d3-b675-97cdffa9680f\") " pod="openstack/nova-cell0-db-create-zzp77" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.170141 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzlc8\" (UniqueName: \"kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8\") pod \"nova-api-db-create-wrpgb\" (UID: \"9a0dfe5d-82f1-43c6-ad07-91da00e313d1\") " pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.199290 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzlc8\" (UniqueName: \"kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8\") pod \"nova-api-db-create-wrpgb\" (UID: \"9a0dfe5d-82f1-43c6-ad07-91da00e313d1\") " pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.271542 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vqfb\" (UniqueName: \"kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb\") pod \"nova-cell0-db-create-zzp77\" (UID: \"caf2d23d-add8-47d3-b675-97cdffa9680f\") " pod="openstack/nova-cell0-db-create-zzp77" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.303353 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-t8wq8"] Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.306023 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-t8wq8" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.315549 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vqfb\" (UniqueName: \"kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb\") pod \"nova-cell0-db-create-zzp77\" (UID: \"caf2d23d-add8-47d3-b675-97cdffa9680f\") " pod="openstack/nova-cell0-db-create-zzp77" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.317185 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-t8wq8"] Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.332831 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.373090 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6txjz\" (UniqueName: \"kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz\") pod \"nova-cell1-db-create-t8wq8\" (UID: \"1a2ea5dd-0745-414b-8ba3-283132a38f5b\") " pod="openstack/nova-cell1-db-create-t8wq8" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.428611 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zzp77" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.474626 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6txjz\" (UniqueName: \"kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz\") pod \"nova-cell1-db-create-t8wq8\" (UID: \"1a2ea5dd-0745-414b-8ba3-283132a38f5b\") " pod="openstack/nova-cell1-db-create-t8wq8" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.514858 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6txjz\" (UniqueName: \"kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz\") pod \"nova-cell1-db-create-t8wq8\" (UID: \"1a2ea5dd-0745-414b-8ba3-283132a38f5b\") " pod="openstack/nova-cell1-db-create-t8wq8" Sep 30 19:51:05 crc kubenswrapper[4756]: I0930 19:51:05.684987 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-t8wq8" Sep 30 19:51:06 crc kubenswrapper[4756]: I0930 19:51:06.922744 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:51:06 crc kubenswrapper[4756]: I0930 19:51:06.922974 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-log" containerID="cri-o://8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659" gracePeriod=30 Sep 30 19:51:06 crc kubenswrapper[4756]: I0930 19:51:06.923078 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-httpd" containerID="cri-o://8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0" gracePeriod=30 Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.172592 4756 generic.go:334] "Generic (PLEG): container finished" podID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerID="8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659" exitCode=143 Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.172680 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerDied","Data":"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"} Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.447696 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.450464 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.527122 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"] Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.539250 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8564f76dcb-9qwnq" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api-log" containerID="cri-o://a9fb663db742268ff6709a83fa05391902d2464b18f50b8142ccb0124eab0605" gracePeriod=30 Sep 30 19:51:07 crc kubenswrapper[4756]: I0930 19:51:07.539938 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8564f76dcb-9qwnq" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api" containerID="cri-o://0da4b3b06e25cc58e04e2a2d12d8d0b9964704dda90da088c73458a4f101cac4" gracePeriod=30 Sep 30 19:51:08 crc kubenswrapper[4756]: I0930 19:51:08.186072 4756 generic.go:334] "Generic (PLEG): container finished" podID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerID="a9fb663db742268ff6709a83fa05391902d2464b18f50b8142ccb0124eab0605" exitCode=143 Sep 30 19:51:08 crc kubenswrapper[4756]: I0930 19:51:08.186150 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerDied","Data":"a9fb663db742268ff6709a83fa05391902d2464b18f50b8142ccb0124eab0605"} Sep 30 19:51:08 crc kubenswrapper[4756]: I0930 19:51:08.192430 4756 generic.go:334] "Generic (PLEG): container finished" podID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" 
containerID="ea63f30719f7ef7c365ee9dedbcfdbef0aa5898605f064674fae1e0af74bf574" exitCode=0 Sep 30 19:51:08 crc kubenswrapper[4756]: I0930 19:51:08.192512 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerDied","Data":"ea63f30719f7ef7c365ee9dedbcfdbef0aa5898605f064674fae1e0af74bf574"} Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.107708 4756 scope.go:117] "RemoveContainer" containerID="31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.205444 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j57gg" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.207189 4756 scope.go:117] "RemoveContainer" containerID="b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51" Sep 30 19:51:09 crc kubenswrapper[4756]: E0930 19:51:09.212606 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51\": container with ID starting with b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51 not found: ID does not exist" containerID="b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.212659 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51"} err="failed to get container status \"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51\": rpc error: code = NotFound desc = could not find container \"b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51\": container with ID starting with b64e97da8b9cfcc67199d8a195264e9958c5a9eb2e03df50f6cc7efcf6186f51 not found: ID does not exist" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.212697 4756 scope.go:117] "RemoveContainer" containerID="31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4" Sep 30 19:51:09 crc kubenswrapper[4756]: E0930 19:51:09.214708 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4\": container with ID starting with 31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4 not found: ID does not exist" containerID="31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.214754 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4"} err="failed to get container status \"31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4\": rpc error: code = NotFound desc = could not find container \"31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4\": container with ID starting with 31682d67530027d45efd041583c7d83d586d117a1d88a2d45baea5a4670ecdc4 not found: ID does not exist" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.215080 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j57gg" event={"ID":"0f0f53de-f545-431e-9901-e45eedc73bf9","Type":"ContainerDied","Data":"b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549"} Sep 30 
19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.215107 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2853b890958d67d9161f6df4fed70d6affc6576649913ae035a6340ce281549" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.215164 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j57gg" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.242943 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxmgt\" (UniqueName: \"kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.243041 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.243078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.243099 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.243126 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.243206 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id\") pod \"0f0f53de-f545-431e-9901-e45eedc73bf9\" (UID: \"0f0f53de-f545-431e-9901-e45eedc73bf9\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.250100 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt" (OuterVolumeSpecName: "kube-api-access-gxmgt") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "kube-api-access-gxmgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.250833 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.259608 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts" (OuterVolumeSpecName: "scripts") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.266555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.273000 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.330972 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data" (OuterVolumeSpecName: "config-data") pod "0f0f53de-f545-431e-9901-e45eedc73bf9" (UID: "0f0f53de-f545-431e-9901-e45eedc73bf9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345158 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345456 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f0f53de-f545-431e-9901-e45eedc73bf9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345467 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxmgt\" (UniqueName: \"kubernetes.io/projected/0f0f53de-f545-431e-9901-e45eedc73bf9-kube-api-access-gxmgt\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345475 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345483 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.345492 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f0f53de-f545-431e-9901-e45eedc73bf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.568614 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell0-db-create-zzp77"] Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.578474 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659223 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659477 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659501 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659574 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659660 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxkt2\" (UniqueName: \"kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659711 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659752 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.659851 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data\") pod \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\" (UID: \"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6\") " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.660217 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs" (OuterVolumeSpecName: "logs") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.660702 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.664184 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts" (OuterVolumeSpecName: "scripts") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.666070 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2" (OuterVolumeSpecName: "kube-api-access-mxkt2") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "kube-api-access-mxkt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.668573 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.695108 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.738833 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.744210 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wrpgb"] Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.754588 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data" (OuterVolumeSpecName: "config-data") pod "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" (UID: "90342cf7-3a20-4e6d-a8f6-f70c474cd1f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761573 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxkt2\" (UniqueName: \"kubernetes.io/projected/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-kube-api-access-mxkt2\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761597 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761607 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761616 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761623 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761631 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761638 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.761668 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.779504 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.817186 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-t8wq8"] Sep 30 19:51:09 crc kubenswrapper[4756]: I0930 19:51:09.862955 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.226347 4756 generic.go:334] "Generic (PLEG): container finished" podID="1a2ea5dd-0745-414b-8ba3-283132a38f5b" containerID="50da31e7ac0b473cd4688ff664a4f0390cc7fa80d4d402e57bb5dbbd273e547e" exitCode=0 Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.226649 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-t8wq8" event={"ID":"1a2ea5dd-0745-414b-8ba3-283132a38f5b","Type":"ContainerDied","Data":"50da31e7ac0b473cd4688ff664a4f0390cc7fa80d4d402e57bb5dbbd273e547e"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.226673 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-t8wq8" 
event={"ID":"1a2ea5dd-0745-414b-8ba3-283132a38f5b","Type":"ContainerStarted","Data":"9b97a0bccaaf8780399a23ec6a67cb55dcfbc223580cbe12609e12fcc7b0bf73"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.228603 4756 generic.go:334] "Generic (PLEG): container finished" podID="caf2d23d-add8-47d3-b675-97cdffa9680f" containerID="ea06fef84d4e35b2e9eb548c83e401e03a330d1d6c02e93bb182740cf50abbf7" exitCode=0 Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.228650 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zzp77" event={"ID":"caf2d23d-add8-47d3-b675-97cdffa9680f","Type":"ContainerDied","Data":"ea06fef84d4e35b2e9eb548c83e401e03a330d1d6c02e93bb182740cf50abbf7"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.228674 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zzp77" event={"ID":"caf2d23d-add8-47d3-b675-97cdffa9680f","Type":"ContainerStarted","Data":"d9d4d91098144bd36a2885d3d77a810b54a705991af9221ab9c9304edbd719ed"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.230655 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerStarted","Data":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.232784 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"90342cf7-3a20-4e6d-a8f6-f70c474cd1f6","Type":"ContainerDied","Data":"f3d99f1858dbcab52e03f81f1b706ca0bc16f951aa653b8ce609e8ef2c640ef2"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.232821 4756 scope.go:117] "RemoveContainer" containerID="ea63f30719f7ef7c365ee9dedbcfdbef0aa5898605f064674fae1e0af74bf574" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.232821 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.235025 4756 generic.go:334] "Generic (PLEG): container finished" podID="9a0dfe5d-82f1-43c6-ad07-91da00e313d1" containerID="d359624da58f1703e3e0aaf51f2b403fbc047680e4375c39c8c5cf26280ba6fb" exitCode=0 Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.235061 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wrpgb" event={"ID":"9a0dfe5d-82f1-43c6-ad07-91da00e313d1","Type":"ContainerDied","Data":"d359624da58f1703e3e0aaf51f2b403fbc047680e4375c39c8c5cf26280ba6fb"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.235076 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wrpgb" event={"ID":"9a0dfe5d-82f1-43c6-ad07-91da00e313d1","Type":"ContainerStarted","Data":"bc9915636b8cfbfba408efea46e0b4b7bf582869faf761a311fec9169e04657b"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.244963 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6","Type":"ContainerStarted","Data":"3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c"} Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.275529 4756 scope.go:117] "RemoveContainer" containerID="ccd5bd63d9943a688550e01265425f0136d6c0e5460a307fca4d7581333bfff3" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.323227 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.204494893 podStartE2EDuration="22.323205013s" podCreationTimestamp="2025-09-30 19:50:48 +0000 UTC" firstStartedPulling="2025-09-30 19:50:49.024754474 +0000 UTC m=+1178.645687951" lastFinishedPulling="2025-09-30 19:51:09.143464594 +0000 UTC m=+1198.764398071" observedRunningTime="2025-09-30 19:51:10.296379812 +0000 UTC m=+1199.917313289" watchObservedRunningTime="2025-09-30 19:51:10.323205013 +0000 UTC m=+1199.944138490" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.359891 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.373316 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.412458 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: E0930 19:51:10.412883 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-log" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.412895 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-log" Sep 30 19:51:10 crc kubenswrapper[4756]: E0930 19:51:10.412908 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" containerName="cinder-db-sync" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.412914 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" containerName="cinder-db-sync" Sep 30 19:51:10 crc kubenswrapper[4756]: E0930 19:51:10.412946 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-httpd" Sep 30 19:51:10 crc 
kubenswrapper[4756]: I0930 19:51:10.412951 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-httpd" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.413119 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" containerName="cinder-db-sync" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.413136 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-httpd" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.413150 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" containerName="glance-log" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.414076 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.417799 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.418006 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.445960 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483582 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483682 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483767 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483823 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483844 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m6rn\" (UniqueName: \"kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483906 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.483961 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.507337 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.508994 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.540348 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.580957 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.584535 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585005 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585059 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585103 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585129 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585146 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585166 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585186 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585219 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99v94\" (UniqueName: \"kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585241 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585267 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585290 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585319 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585334 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m6rn\" (UniqueName: \"kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585352 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.585929 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.592435 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.593118 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.593316 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.593425 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bk8mp" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.593507 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.595153 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.595439 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.599142 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.609018 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.612701 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.614321 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.633808 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.640121 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m6rn\" (UniqueName: \"kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn\") pod \"glance-default-external-api-0\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " pod="openstack/glance-default-external-api-0" Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.673628 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.675097 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.678143 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686197 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686256 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw2b4\" (UniqueName: \"kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686282 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686321 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686359 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99v94\" (UniqueName: \"kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686422 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686440 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686484 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686510 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686547 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686566 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686579 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686623 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686642 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686660 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686711 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-887gt\" (UniqueName: \"kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.686731 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.687186 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.687775 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.687927 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.687986 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.688596 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.694052 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.695073 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.701624 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.705928 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99v94\" (UniqueName: \"kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94\") pod \"dnsmasq-dns-5784cf869f-4qwd4\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.785532 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8564f76dcb-9qwnq" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:53852->10.217.0.160:9311: read: connection reset by peer"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.785577 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8564f76dcb-9qwnq" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:53862->10.217.0.160:9311: read: connection reset by peer"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.787634 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.792422 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.792458 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.792830 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.792854 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793416 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793446 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793471 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793483 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793513 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-887gt\" (UniqueName: \"kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793532 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793566 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793589 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.793614 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw2b4\" (UniqueName: \"kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.794151 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.798883 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.798967 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.800011 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.803198 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.803802 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.809619 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.811103 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw2b4\" (UniqueName: \"kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4\") pod \"cinder-scheduler-0\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " pod="openstack/cinder-scheduler-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.813674 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.814074 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.815008 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.822962 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-887gt\" (UniqueName: \"kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.823129 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom\") pod \"cinder-api-0\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " pod="openstack/cinder-api-0"
Sep 30 19:51:10 crc kubenswrapper[4756]: I0930 19:51:10.847140 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.006623 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.023980 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.147762 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90342cf7-3a20-4e6d-a8f6-f70c474cd1f6" path="/var/lib/kubelet/pods/90342cf7-3a20-4e6d-a8f6-f70c474cd1f6/volumes"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.210854 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.272869 4756 generic.go:334] "Generic (PLEG): container finished" podID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerID="0da4b3b06e25cc58e04e2a2d12d8d0b9964704dda90da088c73458a4f101cac4" exitCode=0
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.272928 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerDied","Data":"0da4b3b06e25cc58e04e2a2d12d8d0b9964704dda90da088c73458a4f101cac4"}
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.272954 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8564f76dcb-9qwnq" event={"ID":"4763c3d2-bfc4-4c13-94c0-f7d852309b17","Type":"ContainerDied","Data":"88ae8364e70387b3c5fea443861cee65b377662d9a658c2787daf9dadec0cb43"}
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.272964 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88ae8364e70387b3c5fea443861cee65b377662d9a658c2787daf9dadec0cb43"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.315813 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerStarted","Data":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"}
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.331998 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332100 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332179 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332221 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p29gg\" (UniqueName: \"kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332244 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332263 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332284 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.332831 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs" (OuterVolumeSpecName: "logs") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.333110 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run\") pod \"c2e63e93-d8e4-4069-bdb3-364f98238496\" (UID: \"c2e63e93-d8e4-4069-bdb3-364f98238496\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.335232 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-logs\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.336210 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.338298 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg" (OuterVolumeSpecName: "kube-api-access-p29gg") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "kube-api-access-p29gg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.339385 4756 generic.go:334] "Generic (PLEG): container finished" podID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerID="8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0" exitCode=0
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.339515 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.339523 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerDied","Data":"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"}
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.339598 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e63e93-d8e4-4069-bdb3-364f98238496","Type":"ContainerDied","Data":"fbab5dc611d5e41a7e3d393f3c2db7f7b81f391215770f09f21b591f6a092580"}
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.339636 4756 scope.go:117] "RemoveContainer" containerID="8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.341063 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts" (OuterVolumeSpecName: "scripts") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.342874 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.357715 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8564f76dcb-9qwnq"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.367684 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.397552 4756 scope.go:117] "RemoveContainer" containerID="8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.406654 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data" (OuterVolumeSpecName: "config-data") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.414171 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c2e63e93-d8e4-4069-bdb3-364f98238496" (UID: "c2e63e93-d8e4-4069-bdb3-364f98238496"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.458641 4756 scope.go:117] "RemoveContainer" containerID="8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459292 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle\") pod \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459336 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvshm\" (UniqueName: \"kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm\") pod \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459368 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom\") pod \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459452 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs\") pod \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459564 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data\") pod \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\" (UID: \"4763c3d2-bfc4-4c13-94c0-f7d852309b17\") "
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.459761 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0\": container with ID starting with 8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0 not found: ID does not exist" containerID="8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459841 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0"} err="failed to get container status \"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0\": rpc error: code = NotFound desc = could not find container \"8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0\": container with ID starting with 8a95f664cddc2bed4974f5a0200a7fdbeb71cb63fb51c1b95c363b9e5da33cc0 not found: ID does not exist"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.459866 4756 scope.go:117] "RemoveContainer" containerID="8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460164 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p29gg\" (UniqueName: \"kubernetes.io/projected/c2e63e93-d8e4-4069-bdb3-364f98238496-kube-api-access-p29gg\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460192 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460206 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460217 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460228 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e63e93-d8e4-4069-bdb3-364f98238496-httpd-run\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460254 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.460265 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e63e93-d8e4-4069-bdb3-364f98238496-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.461093 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659\": container with ID starting with 8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659 not found: ID does not exist" containerID="8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.461136 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659"} err="failed to get container status \"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659\": rpc error: code = NotFound desc = could not find container \"8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659\": container with ID starting with 8d0874be0504e1031955aae03da566574435a30a2e4a55949466b466b1df6659 not found: ID does not exist"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.462617 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs" (OuterVolumeSpecName: "logs") pod "4763c3d2-bfc4-4c13-94c0-f7d852309b17" (UID: "4763c3d2-bfc4-4c13-94c0-f7d852309b17"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.470785 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4763c3d2-bfc4-4c13-94c0-f7d852309b17" (UID: "4763c3d2-bfc4-4c13-94c0-f7d852309b17"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.474514 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm" (OuterVolumeSpecName: "kube-api-access-zvshm") pod "4763c3d2-bfc4-4c13-94c0-f7d852309b17" (UID: "4763c3d2-bfc4-4c13-94c0-f7d852309b17"). InnerVolumeSpecName "kube-api-access-zvshm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.485554 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.525558 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4763c3d2-bfc4-4c13-94c0-f7d852309b17" (UID: "4763c3d2-bfc4-4c13-94c0-f7d852309b17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.528773 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data" (OuterVolumeSpecName: "config-data") pod "4763c3d2-bfc4-4c13-94c0-f7d852309b17" (UID: "4763c3d2-bfc4-4c13-94c0-f7d852309b17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.560930 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562303 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562472 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvshm\" (UniqueName: \"kubernetes.io/projected/4763c3d2-bfc4-4c13-94c0-f7d852309b17-kube-api-access-zvshm\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562532 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562584 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562634 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4763c3d2-bfc4-4c13-94c0-f7d852309b17-logs\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.562697 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4763c3d2-bfc4-4c13-94c0-f7d852309b17-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.628719 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.706100 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.719957 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.730561 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.731241 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.731310 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api"
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.731380 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-httpd"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.731455 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-httpd"
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.731523 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.731657 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: E0930 19:51:11.731757 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.731857 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.732180 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.732265 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" containerName="barbican-api-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.732355 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-log"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.732464 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" containerName="glance-httpd"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.741875 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.743837 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.744464 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.753171 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.777757 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891380 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891470 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z7pf\" (UniqueName: \"kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891519 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891539 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891587 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891603 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891678 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.891701 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:11 crc kubenswrapper[4756]: I0930 19:51:11.955380 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-t8wq8"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.993890 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.993934 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z7pf\" (UniqueName: \"kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.993970 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.993992 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.994010 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.994024 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.994090 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.994113 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.995264 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:11.995312 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.000052 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.011887 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.013027 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.021060 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z7pf\" (UniqueName: \"kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.023137 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.034136 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.056004 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.101718 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.107163 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6txjz\" (UniqueName: \"kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz\") pod \"1a2ea5dd-0745-414b-8ba3-283132a38f5b\" (UID: \"1a2ea5dd-0745-414b-8ba3-283132a38f5b\") "
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.133741 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz" (OuterVolumeSpecName: "kube-api-access-6txjz") pod "1a2ea5dd-0745-414b-8ba3-283132a38f5b" (UID: "1a2ea5dd-0745-414b-8ba3-283132a38f5b"). InnerVolumeSpecName "kube-api-access-6txjz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.209031 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6txjz\" (UniqueName: \"kubernetes.io/projected/1a2ea5dd-0745-414b-8ba3-283132a38f5b-kube-api-access-6txjz\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.368804 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.393611 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zzp77"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.402637 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wrpgb"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.477464 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wrpgb" event={"ID":"9a0dfe5d-82f1-43c6-ad07-91da00e313d1","Type":"ContainerDied","Data":"bc9915636b8cfbfba408efea46e0b4b7bf582869faf761a311fec9169e04657b"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.477516 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc9915636b8cfbfba408efea46e0b4b7bf582869faf761a311fec9169e04657b"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.479187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-t8wq8" event={"ID":"1a2ea5dd-0745-414b-8ba3-283132a38f5b","Type":"ContainerDied","Data":"9b97a0bccaaf8780399a23ec6a67cb55dcfbc223580cbe12609e12fcc7b0bf73"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.479206 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b97a0bccaaf8780399a23ec6a67cb55dcfbc223580cbe12609e12fcc7b0bf73"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.479246 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-t8wq8"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.482557 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zzp77" event={"ID":"caf2d23d-add8-47d3-b675-97cdffa9680f","Type":"ContainerDied","Data":"d9d4d91098144bd36a2885d3d77a810b54a705991af9221ab9c9304edbd719ed"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.482578 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d4d91098144bd36a2885d3d77a810b54a705991af9221ab9c9304edbd719ed"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.482592 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zzp77"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.487301 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerStarted","Data":"fc4fd0c65e8eb53bce6313ec5636e56cf1e5139ab7729e6b94cf6288c118b32f"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.488956 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerStarted","Data":"b3ad087e9120547d268183116bcda27fc54269285012aa37cd7e7d9d1439c20c"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.499616 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerStarted","Data":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.499784 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-central-agent" containerID="cri-o://91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" gracePeriod=30
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.499874 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.499990 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="proxy-httpd" containerID="cri-o://d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" gracePeriod=30
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.500150 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-notification-agent" containerID="cri-o://25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" gracePeriod=30
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.500194 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="sg-core" containerID="cri-o://c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" gracePeriod=30
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.514694 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerStarted","Data":"ae5a3b95769c08291481e06f22b1517c9339cff6ebc043bb2f8fd1964148fa8f"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.517490 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzlc8\" (UniqueName: \"kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8\") pod \"9a0dfe5d-82f1-43c6-ad07-91da00e313d1\" (UID: \"9a0dfe5d-82f1-43c6-ad07-91da00e313d1\") "
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.517678 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vqfb\" (UniqueName: \"kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb\") pod \"caf2d23d-add8-47d3-b675-97cdffa9680f\" (UID: \"caf2d23d-add8-47d3-b675-97cdffa9680f\") "
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.526692 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8" (OuterVolumeSpecName: "kube-api-access-gzlc8") pod "9a0dfe5d-82f1-43c6-ad07-91da00e313d1" (UID: "9a0dfe5d-82f1-43c6-ad07-91da00e313d1"). InnerVolumeSpecName "kube-api-access-gzlc8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.541007 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb" (OuterVolumeSpecName: "kube-api-access-9vqfb") pod "caf2d23d-add8-47d3-b675-97cdffa9680f" (UID: "caf2d23d-add8-47d3-b675-97cdffa9680f"). InnerVolumeSpecName "kube-api-access-9vqfb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.541246 4756 generic.go:334] "Generic (PLEG): container finished" podID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerID="b8b76eb8b3e1eda91f6d68df720fb41c71f2d82cfa6e3420643684942b12a26b" exitCode=0
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.541334 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8564f76dcb-9qwnq"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.542322 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" event={"ID":"78d7d65c-1041-4ec8-97df-885ea20328ea","Type":"ContainerDied","Data":"b8b76eb8b3e1eda91f6d68df720fb41c71f2d82cfa6e3420643684942b12a26b"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.542746 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" event={"ID":"78d7d65c-1041-4ec8-97df-885ea20328ea","Type":"ContainerStarted","Data":"263e4d83488c467b9ab1a780cf4d661166e4185a0fb7d5a2e745febf930183a2"}
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.557099 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.954860766 podStartE2EDuration="13.557082736s" podCreationTimestamp="2025-09-30 19:50:59 +0000 UTC" firstStartedPulling="2025-09-30 19:51:00.000976821 +0000 UTC m=+1189.621910308" lastFinishedPulling="2025-09-30 19:51:11.603198801 +0000 UTC m=+1201.224132278" observedRunningTime="2025-09-30 19:51:12.539695871 +0000 UTC m=+1202.160629358" watchObservedRunningTime="2025-09-30 19:51:12.557082736 +0000 UTC m=+1202.178016213"
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.620894 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzlc8\" (UniqueName: \"kubernetes.io/projected/9a0dfe5d-82f1-43c6-ad07-91da00e313d1-kube-api-access-gzlc8\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.620935 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vqfb\" (UniqueName: \"kubernetes.io/projected/caf2d23d-add8-47d3-b675-97cdffa9680f-kube-api-access-9vqfb\") on node \"crc\" DevicePath \"\""
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.622902 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"]
Sep 30 19:51:12 crc kubenswrapper[4756]: I0930 19:51:12.642408 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-8564f76dcb-9qwnq"]
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.095804 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 19:51:13 crc kubenswrapper[4756]: W0930 19:51:13.114600 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod207e631b_3ef5_430f_bad1_4a9972dc8eb4.slice/crio-86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324 WatchSource:0}: Error finding container 86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324: Status 404 returned error can't find the container with id 86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.138696 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4763c3d2-bfc4-4c13-94c0-f7d852309b17" path="/var/lib/kubelet/pods/4763c3d2-bfc4-4c13-94c0-f7d852309b17/volumes"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.139665 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2e63e93-d8e4-4069-bdb3-364f98238496" path="/var/lib/kubelet/pods/c2e63e93-d8e4-4069-bdb3-364f98238496/volumes"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.431062 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.514827 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.556246 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerStarted","Data":"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559475 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" exitCode=0
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559506 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" exitCode=2
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559514 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" exitCode=0
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559523 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" exitCode=0
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559596 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerDied","Data":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559606 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559620 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerDied","Data":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559630 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerDied","Data":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559638 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerDied","Data":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.559640 4756 scope.go:117] "RemoveContainer" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.560369 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccbdfb43-a43c-4509-b6e5-ed193ac842b4","Type":"ContainerDied","Data":"c0efa8e44016b81bba0da5124e255628603ecb99eec44a52ef84277ee40638d9"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.563723 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerStarted","Data":"184b0bb964b5d979736c25d5b822cdec9e2f47d28b7b5da07359b35546a67d3b"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.577678 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" event={"ID":"78d7d65c-1041-4ec8-97df-885ea20328ea","Type":"ContainerStarted","Data":"aa920ad2cb5c3c09829c17c32c5930c36c12bdb5b70db6d64f0c0b6f86627d08"}
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.578751 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.602885 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" podStartSLOduration=3.602867983 podStartE2EDuration="3.602867983s" podCreationTimestamp="2025-09-30 19:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:13.595602453 +0000 UTC m=+1203.216535930" watchObservedRunningTime="2025-09-30 19:51:13.602867983 +0000 UTC m=+1203.223801460"
Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.610041 4756 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-api-db-create-wrpgb" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.610866 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerStarted","Data":"86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324"} Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642630 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2rfk\" (UniqueName: \"kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642736 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642776 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642806 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642832 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642908 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.642965 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle\") pod \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\" (UID: \"ccbdfb43-a43c-4509-b6e5-ed193ac842b4\") " Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.645233 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.645746 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.656665 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts" (OuterVolumeSpecName: "scripts") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.662631 4756 scope.go:117] "RemoveContainer" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.662633 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk" (OuterVolumeSpecName: "kube-api-access-s2rfk") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "kube-api-access-s2rfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.695238 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.719001 4756 scope.go:117] "RemoveContainer" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.742847 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745208 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745252 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2rfk\" (UniqueName: \"kubernetes.io/projected/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-kube-api-access-s2rfk\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745266 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745278 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745287 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745296 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.745428 4756 scope.go:117] "RemoveContainer" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.779070 4756 scope.go:117] "RemoveContainer" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.779460 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": container with ID starting with d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6 not found: ID does not exist" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.779488 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"} err="failed to get container status \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": rpc error: code = NotFound desc = could not find container \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": container with ID starting with d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.779507 4756 scope.go:117] "RemoveContainer" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.780145 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": container with ID starting with 
c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41 not found: ID does not exist" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.780167 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"} err="failed to get container status \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": rpc error: code = NotFound desc = could not find container \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": container with ID starting with c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.780181 4756 scope.go:117] "RemoveContainer" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.780848 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": container with ID starting with 25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2 not found: ID does not exist" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.780900 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"} err="failed to get container status \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": rpc error: code = NotFound desc = could not find container \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": container with ID starting with 25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.780929 4756 scope.go:117] "RemoveContainer" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.781198 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": container with ID starting with 91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f not found: ID does not exist" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.781220 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"} err="failed to get container status \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": rpc error: code = NotFound desc = could not find container \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": container with ID starting with 91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.781233 4756 scope.go:117] "RemoveContainer" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.782543 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"} err="failed to get container status \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": rpc error: code = NotFound desc = could not find container \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": container with ID starting with d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.782564 4756 scope.go:117] "RemoveContainer" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.782858 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"} err="failed to get container status \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": rpc error: code = NotFound desc = could not find container \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": container with ID starting with c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.782879 4756 scope.go:117] "RemoveContainer" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783128 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"} err="failed to get container status \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": rpc error: code = NotFound desc = could not find container \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": container with ID starting with 25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783169 4756 scope.go:117] "RemoveContainer" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783220 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data" (OuterVolumeSpecName: "config-data") pod "ccbdfb43-a43c-4509-b6e5-ed193ac842b4" (UID: "ccbdfb43-a43c-4509-b6e5-ed193ac842b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783488 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"} err="failed to get container status \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": rpc error: code = NotFound desc = could not find container \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": container with ID starting with 91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783510 4756 scope.go:117] "RemoveContainer" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783825 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"} err="failed to get container status \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": rpc error: code = NotFound desc = could not find container \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": container with ID starting with d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.783846 4756 scope.go:117] "RemoveContainer" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.784205 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"} err="failed to get container status \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": rpc error: code = NotFound desc = could not find container \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": container with ID starting with c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.784248 4756 scope.go:117] "RemoveContainer" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.784898 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"} err="failed to get container status \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": rpc error: code = NotFound desc = could not find container \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": container with ID starting with 25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.784928 4756 scope.go:117] "RemoveContainer" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785191 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"} err="failed to get container status \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": rpc error: code = NotFound desc = could not find container 
\"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": container with ID starting with 91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785228 4756 scope.go:117] "RemoveContainer" containerID="d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785499 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6"} err="failed to get container status \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": rpc error: code = NotFound desc = could not find container \"d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6\": container with ID starting with d2df4deeda92d5f46b45d497713585ff2e90e53e870d0c9502d285b4796907d6 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785522 4756 scope.go:117] "RemoveContainer" containerID="c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785754 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41"} err="failed to get container status \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": rpc error: code = NotFound desc = could not find container \"c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41\": container with ID starting with c5eea2fa4f508645946ee9d174b5d251fc6d4e8d78f0e9913a88b04671851f41 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.785807 4756 scope.go:117] "RemoveContainer" containerID="25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.786055 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2"} err="failed to get container status \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": rpc error: code = NotFound desc = could not find container \"25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2\": container with ID starting with 25dd6fcc14b9bdd7ae6c26c049e0e5e4c8b1a005a2e52838493a28d697e6a0e2 not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.786074 4756 scope.go:117] "RemoveContainer" containerID="91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.786948 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f"} err="failed to get container status \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": rpc error: code = NotFound desc = could not find container \"91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f\": container with ID starting with 91cd1557435975cd45d505313ea110a49f31f920f481b283d7a2944ac2043e6f not found: ID does not exist" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.847104 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccbdfb43-a43c-4509-b6e5-ed193ac842b4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 
19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.906454 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.911249 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927197 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927794 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-notification-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927807 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-notification-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927825 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a2ea5dd-0745-414b-8ba3-283132a38f5b" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927831 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a2ea5dd-0745-414b-8ba3-283132a38f5b" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927846 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-central-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927852 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-central-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927863 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf2d23d-add8-47d3-b675-97cdffa9680f" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927870 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf2d23d-add8-47d3-b675-97cdffa9680f" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927886 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="sg-core" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927894 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="sg-core" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927906 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a0dfe5d-82f1-43c6-ad07-91da00e313d1" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927912 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a0dfe5d-82f1-43c6-ad07-91da00e313d1" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: E0930 19:51:13.927920 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="proxy-httpd" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.927927 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="proxy-httpd" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928101 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="sg-core" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928123 4756 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-notification-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928133 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="ceilometer-central-agent" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928143 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a2ea5dd-0745-414b-8ba3-283132a38f5b" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928153 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" containerName="proxy-httpd" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928169 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a0dfe5d-82f1-43c6-ad07-91da00e313d1" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.928630 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="caf2d23d-add8-47d3-b675-97cdffa9680f" containerName="mariadb-database-create" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.930206 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.933778 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.933916 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:51:13 crc kubenswrapper[4756]: I0930 19:51:13.938943 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.053106 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.053146 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.053176 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.053519 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.054149 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.054744 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.054899 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw7k8\" (UniqueName: \"kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.156992 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw7k8\" (UniqueName: \"kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157320 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157345 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157411 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157441 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157462 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157505 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157852 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.157948 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.163236 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.163988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.169030 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.169203 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.173460 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw7k8\" (UniqueName: \"kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8\") pod \"ceilometer-0\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.280079 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.649991 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerStarted","Data":"910743b27955784ee4da86c480d093283c52757852937aeb938dde4695e57b83"} Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.706367 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerStarted","Data":"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28"} Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.712811 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerStarted","Data":"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49"} Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.713008 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.713018 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api-log" containerID="cri-o://cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.713106 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api" containerID="cri-o://f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.739194 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerStarted","Data":"a96ca6143ea765730fa414e7b169828518681d4fa546277df96764fd20203c32"} Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.739756 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.739739121 podStartE2EDuration="4.739739121s" podCreationTimestamp="2025-09-30 19:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:14.733304892 +0000 UTC m=+1204.354238389" watchObservedRunningTime="2025-09-30 19:51:14.739739121 +0000 UTC m=+1204.360672598" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.763356 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.763338097 podStartE2EDuration="4.763338097s" podCreationTimestamp="2025-09-30 19:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:14.753162261 +0000 UTC m=+1204.374095738" watchObservedRunningTime="2025-09-30 19:51:14.763338097 +0000 UTC m=+1204.384271574" Sep 30 19:51:14 crc kubenswrapper[4756]: I0930 19:51:14.845349 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.126075 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ccbdfb43-a43c-4509-b6e5-ed193ac842b4" path="/var/lib/kubelet/pods/ccbdfb43-a43c-4509-b6e5-ed193ac842b4/volumes" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.223446 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c915-account-create-77pfr"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.224706 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.226714 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.238444 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c915-account-create-77pfr"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.300264 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn9wt\" (UniqueName: \"kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt\") pod \"nova-api-c915-account-create-77pfr\" (UID: \"9aa77118-06fc-4e3b-be73-b1cb6e95ae70\") " pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.356664 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.402188 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn9wt\" (UniqueName: \"kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt\") pod \"nova-api-c915-account-create-77pfr\" (UID: \"9aa77118-06fc-4e3b-be73-b1cb6e95ae70\") " pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.429891 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn9wt\" (UniqueName: \"kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt\") pod \"nova-api-c915-account-create-77pfr\" (UID: \"9aa77118-06fc-4e3b-be73-b1cb6e95ae70\") " pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.431143 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-073f-account-create-45blc"] Sep 30 19:51:15 crc kubenswrapper[4756]: E0930 19:51:15.431545 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.431560 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api" Sep 30 19:51:15 crc kubenswrapper[4756]: E0930 19:51:15.431601 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api-log" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.431608 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api-log" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.431780 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api-log" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.431801 4756 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="93d007d0-df18-4967-be12-2216c5340439" containerName="cinder-api" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.432346 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.436875 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.466809 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-073f-account-create-45blc"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.503770 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.503846 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.503942 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.503983 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.504012 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.504064 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-887gt\" (UniqueName: \"kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.504125 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs\") pod \"93d007d0-df18-4967-be12-2216c5340439\" (UID: \"93d007d0-df18-4967-be12-2216c5340439\") " Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.504447 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgp59\" (UniqueName: \"kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59\") pod \"nova-cell0-073f-account-create-45blc\" (UID: \"39016fe2-5f7f-45fe-8f6e-4bce3684c667\") " pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 
19:51:15.507672 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.508536 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.508590 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs" (OuterVolumeSpecName: "logs") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.510443 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt" (OuterVolumeSpecName: "kube-api-access-887gt") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "kube-api-access-887gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.519569 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts" (OuterVolumeSpecName: "scripts") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.543046 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.545848 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.561900 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data" (OuterVolumeSpecName: "config-data") pod "93d007d0-df18-4967-be12-2216c5340439" (UID: "93d007d0-df18-4967-be12-2216c5340439"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.609700 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgp59\" (UniqueName: \"kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59\") pod \"nova-cell0-073f-account-create-45blc\" (UID: \"39016fe2-5f7f-45fe-8f6e-4bce3684c667\") " pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.613937 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614167 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614206 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614223 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-887gt\" (UniqueName: \"kubernetes.io/projected/93d007d0-df18-4967-be12-2216c5340439-kube-api-access-887gt\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614235 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93d007d0-df18-4967-be12-2216c5340439-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614246 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93d007d0-df18-4967-be12-2216c5340439-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.614257 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/93d007d0-df18-4967-be12-2216c5340439-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.635152 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgp59\" (UniqueName: \"kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59\") pod \"nova-cell0-073f-account-create-45blc\" (UID: \"39016fe2-5f7f-45fe-8f6e-4bce3684c667\") " pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.786767 4756 generic.go:334] "Generic (PLEG): container finished" podID="93d007d0-df18-4967-be12-2216c5340439" containerID="f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787017 4756 generic.go:334] "Generic (PLEG): container finished" podID="93d007d0-df18-4967-be12-2216c5340439" containerID="cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" exitCode=143 Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787095 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerDied","Data":"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49"} Sep 30 
19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787120 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerDied","Data":"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787131 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"93d007d0-df18-4967-be12-2216c5340439","Type":"ContainerDied","Data":"b3ad087e9120547d268183116bcda27fc54269285012aa37cd7e7d9d1439c20c"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787146 4756 scope.go:117] "RemoveContainer" containerID="f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.787299 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.799463 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.800665 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerStarted","Data":"98b9abc1faee822957f865610dacccc74f583dcf0f17bcdb790be269df6317c7"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.800703 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerStarted","Data":"33d98f1d6d02111189c6e1b1ab0f6902f78e99e854c358ddf47e783b2a3ff245"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.810361 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerStarted","Data":"af672176e9d6745734bae103367eaab92c17b4c3f23ec9cc9c702908a2e393cc"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.814237 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerStarted","Data":"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e"} Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.838612 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.838593684 podStartE2EDuration="4.838593684s" podCreationTimestamp="2025-09-30 19:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:15.833411788 +0000 UTC m=+1205.454345265" watchObservedRunningTime="2025-09-30 19:51:15.838593684 +0000 UTC m=+1205.459527171" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.851488 4756 scope.go:117] "RemoveContainer" containerID="cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.863884 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.52863138 podStartE2EDuration="5.863866674s" podCreationTimestamp="2025-09-30 19:51:10 +0000 UTC" firstStartedPulling="2025-09-30 19:51:11.799275786 +0000 UTC m=+1201.420209263" lastFinishedPulling="2025-09-30 19:51:13.13451108 +0000 UTC m=+1202.755444557" 
observedRunningTime="2025-09-30 19:51:15.853983306 +0000 UTC m=+1205.474916793" watchObservedRunningTime="2025-09-30 19:51:15.863866674 +0000 UTC m=+1205.484800151" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.896012 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.905374 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.911623 4756 scope.go:117] "RemoveContainer" containerID="f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" Sep 30 19:51:15 crc kubenswrapper[4756]: E0930 19:51:15.913287 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49\": container with ID starting with f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49 not found: ID does not exist" containerID="f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.913317 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49"} err="failed to get container status \"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49\": rpc error: code = NotFound desc = could not find container \"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49\": container with ID starting with f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.913336 4756 scope.go:117] "RemoveContainer" containerID="cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.918730 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.920250 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:51:15 crc kubenswrapper[4756]: E0930 19:51:15.923832 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848\": container with ID starting with cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848 not found: ID does not exist" containerID="cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.923868 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848"} err="failed to get container status \"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848\": rpc error: code = NotFound desc = could not find container \"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848\": container with ID starting with cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.923889 4756 scope.go:117] "RemoveContainer" containerID="f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.923952 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.924937 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.926319 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49"} err="failed to get container status \"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49\": rpc error: code = NotFound desc = could not find container \"f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49\": container with ID starting with f7a6eb5776c4a8823f0cb13b0ff6bcfede5152242aa963f91b6a152eff120e49 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.926352 4756 scope.go:117] "RemoveContainer" containerID="cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.926756 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.926905 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 19:51:15 crc kubenswrapper[4756]: I0930 19:51:15.930801 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848"} err="failed to get container status \"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848\": rpc error: code = NotFound desc = could not find container \"cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848\": container with ID starting with cc444e2801b2228906841448a0456019fe64b3f3de08ee2818d998756403f848 not found: ID does not exist" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.007487 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026639 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026690 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026715 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026731 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026748 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026769 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026842 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm9sn\" (UniqueName: \"kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026880 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.026898 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.057327 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c915-account-create-77pfr"] Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.128885 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129130 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129151 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129167 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129194 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129266 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm9sn\" (UniqueName: \"kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129306 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129324 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.129356 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.130416 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.130996 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.138107 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.137775 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.138823 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.138876 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.146638 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.149644 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.151945 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm9sn\" (UniqueName: \"kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn\") pod \"cinder-api-0\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.277963 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.360381 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-073f-account-create-45blc"] Sep 30 19:51:16 crc kubenswrapper[4756]: W0930 19:51:16.377425 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39016fe2_5f7f_45fe_8f6e_4bce3684c667.slice/crio-8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb WatchSource:0}: Error finding container 8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb: Status 404 returned error can't find the container with id 8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.574619 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.761966 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.826322 4756 generic.go:334] "Generic (PLEG): container finished" podID="9aa77118-06fc-4e3b-be73-b1cb6e95ae70" containerID="e4a6232e87ef083723c07a59c58f19271017670018abbfa1105600aabf771695" exitCode=0 Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.826386 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c915-account-create-77pfr" event={"ID":"9aa77118-06fc-4e3b-be73-b1cb6e95ae70","Type":"ContainerDied","Data":"e4a6232e87ef083723c07a59c58f19271017670018abbfa1105600aabf771695"} Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.826426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c915-account-create-77pfr" event={"ID":"9aa77118-06fc-4e3b-be73-b1cb6e95ae70","Type":"ContainerStarted","Data":"e4927beb90084b0150c27386e5ea9deb5033a628e980d0c8b4f0e9474c23e064"} Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.829137 4756 generic.go:334] "Generic (PLEG): container finished" podID="39016fe2-5f7f-45fe-8f6e-4bce3684c667" containerID="a88c7d451b77d6db9b1544138620e9cae6b1e1fa6c6f9218fa77d075c959d40e" exitCode=0 Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.829187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-073f-account-create-45blc" event={"ID":"39016fe2-5f7f-45fe-8f6e-4bce3684c667","Type":"ContainerDied","Data":"a88c7d451b77d6db9b1544138620e9cae6b1e1fa6c6f9218fa77d075c959d40e"} Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.829203 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-073f-account-create-45blc" event={"ID":"39016fe2-5f7f-45fe-8f6e-4bce3684c667","Type":"ContainerStarted","Data":"8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb"} Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.842842 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerStarted","Data":"ed381317dcf81febf0daebab1d73cf36737f33f383a75dec75ad693dad5e778d"} Sep 30 19:51:16 crc kubenswrapper[4756]: I0930 19:51:16.850317 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerStarted","Data":"4ddd58096914be50411a905f20f0137a78da0564094809dcc3b23b355fadfd18"} Sep 30 19:51:17 crc kubenswrapper[4756]: I0930 19:51:17.129772 4756 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="93d007d0-df18-4967-be12-2216c5340439" path="/var/lib/kubelet/pods/93d007d0-df18-4967-be12-2216c5340439/volumes" Sep 30 19:51:17 crc kubenswrapper[4756]: I0930 19:51:17.861111 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerStarted","Data":"34a9a3332fdf0e5c76c740afbc2528fa100673eb4984317f115c8a7e501cf8ba"} Sep 30 19:51:17 crc kubenswrapper[4756]: I0930 19:51:17.867429 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerStarted","Data":"6488a4feaf8d2ad8fc0d67a345ed83f1067bbe92c441eef4d717437fb26b23d1"} Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.367278 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.372982 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.477081 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn9wt\" (UniqueName: \"kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt\") pod \"9aa77118-06fc-4e3b-be73-b1cb6e95ae70\" (UID: \"9aa77118-06fc-4e3b-be73-b1cb6e95ae70\") " Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.477140 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgp59\" (UniqueName: \"kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59\") pod \"39016fe2-5f7f-45fe-8f6e-4bce3684c667\" (UID: \"39016fe2-5f7f-45fe-8f6e-4bce3684c667\") " Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.483646 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt" (OuterVolumeSpecName: "kube-api-access-nn9wt") pod "9aa77118-06fc-4e3b-be73-b1cb6e95ae70" (UID: "9aa77118-06fc-4e3b-be73-b1cb6e95ae70"). InnerVolumeSpecName "kube-api-access-nn9wt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.509651 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59" (OuterVolumeSpecName: "kube-api-access-zgp59") pod "39016fe2-5f7f-45fe-8f6e-4bce3684c667" (UID: "39016fe2-5f7f-45fe-8f6e-4bce3684c667"). InnerVolumeSpecName "kube-api-access-zgp59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.579689 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn9wt\" (UniqueName: \"kubernetes.io/projected/9aa77118-06fc-4e3b-be73-b1cb6e95ae70-kube-api-access-nn9wt\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.579736 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgp59\" (UniqueName: \"kubernetes.io/projected/39016fe2-5f7f-45fe-8f6e-4bce3684c667-kube-api-access-zgp59\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.876184 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerStarted","Data":"6a012f6649739f7a3c55f7eb5ccd4842f6f7de987bfd7b459d8bead1ed555778"} Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.876946 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.879536 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c915-account-create-77pfr" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.879540 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c915-account-create-77pfr" event={"ID":"9aa77118-06fc-4e3b-be73-b1cb6e95ae70","Type":"ContainerDied","Data":"e4927beb90084b0150c27386e5ea9deb5033a628e980d0c8b4f0e9474c23e064"} Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.879796 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4927beb90084b0150c27386e5ea9deb5033a628e980d0c8b4f0e9474c23e064" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.883251 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-073f-account-create-45blc" event={"ID":"39016fe2-5f7f-45fe-8f6e-4bce3684c667","Type":"ContainerDied","Data":"8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb"} Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.883310 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d350c43b443c91aae754d350ad609b4e7addb922b7244f5fc2da334809c15eb" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.883389 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-073f-account-create-45blc" Sep 30 19:51:18 crc kubenswrapper[4756]: I0930 19:51:18.899746 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.899726432 podStartE2EDuration="3.899726432s" podCreationTimestamp="2025-09-30 19:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:18.898957422 +0000 UTC m=+1208.519890939" watchObservedRunningTime="2025-09-30 19:51:18.899726432 +0000 UTC m=+1208.520659909" Sep 30 19:51:19 crc kubenswrapper[4756]: E0930 19:51:19.134702 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39016fe2_5f7f_45fe_8f6e_4bce3684c667.slice\": RecentStats: unable to find data in memory cache]" Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.899223 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-central-agent" containerID="cri-o://98b9abc1faee822957f865610dacccc74f583dcf0f17bcdb790be269df6317c7" gracePeriod=30 Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.900125 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerStarted","Data":"2a6b49a5a5ec5e9876a416bc778d9dfec0f460ae8f1032e839269ab72c61e748"} Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.900208 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.900796 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="proxy-httpd" containerID="cri-o://2a6b49a5a5ec5e9876a416bc778d9dfec0f460ae8f1032e839269ab72c61e748" gracePeriod=30 Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.900941 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="sg-core" containerID="cri-o://6488a4feaf8d2ad8fc0d67a345ed83f1067bbe92c441eef4d717437fb26b23d1" gracePeriod=30 Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.901049 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-notification-agent" containerID="cri-o://ed381317dcf81febf0daebab1d73cf36737f33f383a75dec75ad693dad5e778d" gracePeriod=30 Sep 30 19:51:19 crc kubenswrapper[4756]: I0930 19:51:19.931357 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.452698787 podStartE2EDuration="6.931323958s" podCreationTimestamp="2025-09-30 19:51:13 +0000 UTC" firstStartedPulling="2025-09-30 19:51:14.878309453 +0000 UTC m=+1204.499242920" lastFinishedPulling="2025-09-30 19:51:19.356934614 +0000 UTC m=+1208.977868091" observedRunningTime="2025-09-30 19:51:19.930741293 +0000 UTC m=+1209.551674770" watchObservedRunningTime="2025-09-30 19:51:19.931323958 +0000 UTC m=+1209.552257435" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.294226 4756 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.617842 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mzmtl"] Sep 30 19:51:20 crc kubenswrapper[4756]: E0930 19:51:20.618213 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa77118-06fc-4e3b-be73-b1cb6e95ae70" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.618228 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa77118-06fc-4e3b-be73-b1cb6e95ae70" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: E0930 19:51:20.618252 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39016fe2-5f7f-45fe-8f6e-4bce3684c667" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.618259 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="39016fe2-5f7f-45fe-8f6e-4bce3684c667" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.618456 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="39016fe2-5f7f-45fe-8f6e-4bce3684c667" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.618473 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aa77118-06fc-4e3b-be73-b1cb6e95ae70" containerName="mariadb-account-create" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.619002 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.621452 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.623113 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.624876 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zj27j" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.636080 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mzmtl"] Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.736866 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.736928 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7shw\" (UniqueName: \"kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.736983 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: 
\"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.737001 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.789182 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.789252 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.826752 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.830337 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.838969 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.839047 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7shw\" (UniqueName: \"kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.839123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.839146 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.845421 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.845445 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: 
\"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.847670 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.849445 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.857057 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7shw\" (UniqueName: \"kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw\") pod \"nova-cell0-conductor-db-sync-mzmtl\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.912379 4756 generic.go:334] "Generic (PLEG): container finished" podID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerID="2a6b49a5a5ec5e9876a416bc778d9dfec0f460ae8f1032e839269ab72c61e748" exitCode=0 Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.913660 4756 generic.go:334] "Generic (PLEG): container finished" podID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerID="6488a4feaf8d2ad8fc0d67a345ed83f1067bbe92c441eef4d717437fb26b23d1" exitCode=2 Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.913825 4756 generic.go:334] "Generic (PLEG): container finished" podID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerID="ed381317dcf81febf0daebab1d73cf36737f33f383a75dec75ad693dad5e778d" exitCode=0 Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.915287 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerDied","Data":"2a6b49a5a5ec5e9876a416bc778d9dfec0f460ae8f1032e839269ab72c61e748"} Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.915433 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerDied","Data":"6488a4feaf8d2ad8fc0d67a345ed83f1067bbe92c441eef4d717437fb26b23d1"} Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.915541 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerDied","Data":"ed381317dcf81febf0daebab1d73cf36737f33f383a75dec75ad693dad5e778d"} Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.915662 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.915908 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.927451 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.927699 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="dnsmasq-dns" 
containerID="cri-o://892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63" gracePeriod=10 Sep 30 19:51:20 crc kubenswrapper[4756]: I0930 19:51:20.935671 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.320159 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.365757 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.464231 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mzmtl"] Sep 30 19:51:21 crc kubenswrapper[4756]: W0930 19:51:21.465564 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc949160_e62c_4982_bb20_8f3ebe1a2692.slice/crio-625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184 WatchSource:0}: Error finding container 625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184: Status 404 returned error can't find the container with id 625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184 Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.563467 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.664958 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s66ps\" (UniqueName: \"kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps\") pod \"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.665016 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config\") pod \"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.665040 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb\") pod \"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.665121 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0\") pod \"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.665147 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc\") pod \"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.665250 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb\") pod 
\"46cc1f38-e166-40c0-9d00-7a074fa80234\" (UID: \"46cc1f38-e166-40c0-9d00-7a074fa80234\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.683725 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps" (OuterVolumeSpecName: "kube-api-access-s66ps") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "kube-api-access-s66ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.731496 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.743840 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.744979 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config" (OuterVolumeSpecName: "config") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.764574 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.766732 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.766810 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s66ps\" (UniqueName: \"kubernetes.io/projected/46cc1f38-e166-40c0-9d00-7a074fa80234-kube-api-access-s66ps\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.766820 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.766828 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.766835 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.786850 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "46cc1f38-e166-40c0-9d00-7a074fa80234" (UID: "46cc1f38-e166-40c0-9d00-7a074fa80234"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.868671 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46cc1f38-e166-40c0-9d00-7a074fa80234-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938227 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938619 4756 generic.go:334] "Generic (PLEG): container finished" podID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerID="892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63" exitCode=0 Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938759 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" event={"ID":"46cc1f38-e166-40c0-9d00-7a074fa80234","Type":"ContainerDied","Data":"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63"} Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938824 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" event={"ID":"46cc1f38-e166-40c0-9d00-7a074fa80234","Type":"ContainerDied","Data":"0578c207917db85c2b4829ed762e18c9066790f1fe9898f12f296cb00e985cf8"} Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938833 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-kjrvj" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.938842 4756 scope.go:117] "RemoveContainer" containerID="892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.948429 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" event={"ID":"dc949160-e62c-4982-bb20-8f3ebe1a2692","Type":"ContainerStarted","Data":"625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184"} Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.956921 4756 generic.go:334] "Generic (PLEG): container finished" podID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerID="98b9abc1faee822957f865610dacccc74f583dcf0f17bcdb790be269df6317c7" exitCode=0 Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.958504 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="cinder-scheduler" containerID="cri-o://8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28" gracePeriod=30 Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.959884 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="probe" containerID="cri-o://e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e" gracePeriod=30 Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.960011 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d992dec2-7b97-4d28-9174-60c1c8361b88","Type":"ContainerDied","Data":"98b9abc1faee822957f865610dacccc74f583dcf0f17bcdb790be269df6317c7"} Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.960019 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.972974 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973098 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973135 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw7k8\" (UniqueName: \"kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973186 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973230 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973271 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.973349 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data\") pod \"d992dec2-7b97-4d28-9174-60c1c8361b88\" (UID: \"d992dec2-7b97-4d28-9174-60c1c8361b88\") " Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.975607 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.976232 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.980852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts" (OuterVolumeSpecName: "scripts") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:21 crc kubenswrapper[4756]: I0930 19:51:21.981104 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8" (OuterVolumeSpecName: "kube-api-access-sw7k8") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "kube-api-access-sw7k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.001473 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.008796 4756 scope.go:117] "RemoveContainer" containerID="01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.014098 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-kjrvj"] Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.014307 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.034119 4756 scope.go:117] "RemoveContainer" containerID="892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.034918 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63\": container with ID starting with 892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63 not found: ID does not exist" containerID="892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.034963 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63"} err="failed to get container status \"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63\": rpc error: code = NotFound desc = could not find container \"892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63\": container with ID starting with 892c57a7fcb232717fe12258a15d82b1e9df9a6b1ea5c6203ef8a97a7978ce63 not found: ID does not exist" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.034991 4756 scope.go:117] "RemoveContainer" containerID="01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.036496 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf\": container with ID starting with 01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf not found: ID does not exist" containerID="01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.036528 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf"} err="failed to get container status \"01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf\": rpc error: code = NotFound desc = could not find container \"01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf\": container with ID starting with 01b91d0f9da313bfca8bffebfd7b6cecce1b443dbdf629a45760c8c71b6a16cf not found: ID does not exist" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.036543 4756 scope.go:117] "RemoveContainer" containerID="2a6b49a5a5ec5e9876a416bc778d9dfec0f460ae8f1032e839269ab72c61e748" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.076339 4756 scope.go:117] "RemoveContainer" containerID="6488a4feaf8d2ad8fc0d67a345ed83f1067bbe92c441eef4d717437fb26b23d1" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.078761 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.078785 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.078798 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw7k8\" (UniqueName: 
\"kubernetes.io/projected/d992dec2-7b97-4d28-9174-60c1c8361b88-kube-api-access-sw7k8\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.078807 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d992dec2-7b97-4d28-9174-60c1c8361b88-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.078816 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.088544 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data" (OuterVolumeSpecName: "config-data") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.101565 4756 scope.go:117] "RemoveContainer" containerID="ed381317dcf81febf0daebab1d73cf36737f33f383a75dec75ad693dad5e778d" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.125923 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d992dec2-7b97-4d28-9174-60c1c8361b88" (UID: "d992dec2-7b97-4d28-9174-60c1c8361b88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.146017 4756 scope.go:117] "RemoveContainer" containerID="98b9abc1faee822957f865610dacccc74f583dcf0f17bcdb790be269df6317c7" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.180796 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.180824 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d992dec2-7b97-4d28-9174-60c1c8361b88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.295409 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.316418 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.325912 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326383 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="sg-core" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326419 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="sg-core" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326430 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="proxy-httpd" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326438 4756 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="proxy-httpd" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326454 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-notification-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326460 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-notification-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326472 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="init" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326478 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="init" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326504 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-central-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326510 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-central-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: E0930 19:51:22.326520 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="dnsmasq-dns" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326526 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="dnsmasq-dns" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326696 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="proxy-httpd" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326712 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-notification-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326722 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="sg-core" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326736 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" containerName="ceilometer-central-agent" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.326752 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" containerName="dnsmasq-dns" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.328414 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.330797 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.331009 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.334580 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.369810 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.369845 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.384585 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.384623 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.384664 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.384789 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.385022 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.385047 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hld6b\" (UniqueName: \"kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.385084 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: 
I0930 19:51:22.415380 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.428661 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.486954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.486990 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hld6b\" (UniqueName: \"kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.487014 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.487084 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.487099 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.487309 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.487369 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.488491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.488844 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.492068 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.493134 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.497193 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.506064 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hld6b\" (UniqueName: \"kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.506687 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.673311 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.977651 4756 generic.go:334] "Generic (PLEG): container finished" podID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerID="e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e" exitCode=0 Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.977729 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerDied","Data":"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e"} Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.981873 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.981893 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.982743 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:22 crc kubenswrapper[4756]: I0930 19:51:22.982767 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.068992 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.107371 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.141103 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46cc1f38-e166-40c0-9d00-7a074fa80234" path="/var/lib/kubelet/pods/46cc1f38-e166-40c0-9d00-7a074fa80234/volumes" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.142118 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d992dec2-7b97-4d28-9174-60c1c8361b88" path="/var/lib/kubelet/pods/d992dec2-7b97-4d28-9174-60c1c8361b88/volumes" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.155000 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.163942 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"] Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.164145 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5647cd86c6-mmv8n" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-api" containerID="cri-o://a0d0a2793ffe10ff819905e8fa8f947fb5983c9223ed4b9f710124623474411e" gracePeriod=30 Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.164485 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5647cd86c6-mmv8n" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-httpd" containerID="cri-o://192228cbe9f8707045ae7438487eea8771f1c6ef226112ba0726991cabd15ecf" gracePeriod=30 Sep 30 19:51:23 crc kubenswrapper[4756]: W0930 19:51:23.189571 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddaa26414_a4d8_42de_b7d4_edfbd69eed0c.slice/crio-d66c73c600327f65b4c7049a58afa2e3c7fa1de34d2c0eb3809c7392f83adc4d WatchSource:0}: Error finding container d66c73c600327f65b4c7049a58afa2e3c7fa1de34d2c0eb3809c7392f83adc4d: Status 
404 returned error can't find the container with id d66c73c600327f65b4c7049a58afa2e3c7fa1de34d2c0eb3809c7392f83adc4d Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.326828 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.995431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerStarted","Data":"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f"} Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.995759 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerStarted","Data":"d66c73c600327f65b4c7049a58afa2e3c7fa1de34d2c0eb3809c7392f83adc4d"} Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.999616 4756 generic.go:334] "Generic (PLEG): container finished" podID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerID="192228cbe9f8707045ae7438487eea8771f1c6ef226112ba0726991cabd15ecf" exitCode=0 Sep 30 19:51:23 crc kubenswrapper[4756]: I0930 19:51:23.999953 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerDied","Data":"192228cbe9f8707045ae7438487eea8771f1c6ef226112ba0726991cabd15ecf"} Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.009055 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerStarted","Data":"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433"} Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.364911 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.365009 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.526389 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-bf36-account-create-6xdsk"] Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.527647 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.534856 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.535115 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bf36-account-create-6xdsk"] Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.657386 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cwcp\" (UniqueName: \"kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp\") pod \"nova-cell1-bf36-account-create-6xdsk\" (UID: \"7d940278-5ba3-4ce6-996a-e4797a22e0d3\") " pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.760442 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cwcp\" (UniqueName: \"kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp\") pod \"nova-cell1-bf36-account-create-6xdsk\" (UID: \"7d940278-5ba3-4ce6-996a-e4797a22e0d3\") " pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.811119 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cwcp\" (UniqueName: \"kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp\") pod \"nova-cell1-bf36-account-create-6xdsk\" (UID: \"7d940278-5ba3-4ce6-996a-e4797a22e0d3\") " pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.885081 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.903618 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.916210 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963369 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963460 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963583 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963639 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963697 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw2b4\" (UniqueName: \"kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.963719 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data\") pod \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\" (UID: \"fbf46aee-6848-4d8c-a589-ffe70c2387d4\") " Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.969593 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.979597 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts" (OuterVolumeSpecName: "scripts") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.979617 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4" (OuterVolumeSpecName: "kube-api-access-vw2b4") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "kube-api-access-vw2b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:25 crc kubenswrapper[4756]: I0930 19:51:25.990547 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.040910 4756 generic.go:334] "Generic (PLEG): container finished" podID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerID="8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28" exitCode=0 Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.040979 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerDied","Data":"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28"} Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.041006 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fbf46aee-6848-4d8c-a589-ffe70c2387d4","Type":"ContainerDied","Data":"fc4fd0c65e8eb53bce6313ec5636e56cf1e5139ab7729e6b94cf6288c118b32f"} Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.041022 4756 scope.go:117] "RemoveContainer" containerID="e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.041146 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.046620 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.053340 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerStarted","Data":"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a"} Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.065880 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.065910 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.065924 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw2b4\" (UniqueName: \"kubernetes.io/projected/fbf46aee-6848-4d8c-a589-ffe70c2387d4-kube-api-access-vw2b4\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.065934 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.065944 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf46aee-6848-4d8c-a589-ffe70c2387d4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.071533 4756 scope.go:117] "RemoveContainer" containerID="8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.103799 4756 scope.go:117] "RemoveContainer" containerID="e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e" Sep 30 19:51:26 crc kubenswrapper[4756]: E0930 19:51:26.104684 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e\": container with ID starting with e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e not found: ID does not exist" containerID="e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.104714 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e"} err="failed to get container status \"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e\": rpc error: code = NotFound desc = could not find container \"e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e\": container with ID starting with e03407d0dd60f8e978c9740bfb9c9d10ba442d27e0fbc113a19c0e1d6095fe4e not found: ID does not exist" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.104745 4756 scope.go:117] "RemoveContainer" containerID="8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28" Sep 30 19:51:26 crc kubenswrapper[4756]: E0930 19:51:26.107333 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28\": 
container with ID starting with 8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28 not found: ID does not exist" containerID="8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.107352 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28"} err="failed to get container status \"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28\": rpc error: code = NotFound desc = could not find container \"8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28\": container with ID starting with 8352a541769fee05d33c41e061dfaf820a55e151a987ed96661aab954b987e28 not found: ID does not exist" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.173545 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data" (OuterVolumeSpecName: "config-data") pod "fbf46aee-6848-4d8c-a589-ffe70c2387d4" (UID: "fbf46aee-6848-4d8c-a589-ffe70c2387d4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.274438 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf46aee-6848-4d8c-a589-ffe70c2387d4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.378496 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.385770 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.420840 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:26 crc kubenswrapper[4756]: E0930 19:51:26.421266 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="probe" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.421284 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="probe" Sep 30 19:51:26 crc kubenswrapper[4756]: E0930 19:51:26.421304 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="cinder-scheduler" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.421311 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="cinder-scheduler" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.421568 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="probe" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.421591 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" containerName="cinder-scheduler" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.422702 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.425117 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.434679 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485062 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485142 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485171 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485232 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb8bd\" (UniqueName: \"kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485289 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.485306 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.492031 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bf36-account-create-6xdsk"] Sep 30 19:51:26 crc kubenswrapper[4756]: W0930 19:51:26.496608 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d940278_5ba3_4ce6_996a_e4797a22e0d3.slice/crio-bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770 WatchSource:0}: Error finding container bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770: Status 404 returned error can't find the container with id bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770 Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586497 4756 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586563 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586650 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb8bd\" (UniqueName: \"kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586735 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586758 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.586794 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.591552 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.594664 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.598532 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.598551 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: 
I0930 19:51:26.600017 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.618958 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb8bd\" (UniqueName: \"kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd\") pod \"cinder-scheduler-0\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " pod="openstack/cinder-scheduler-0" Sep 30 19:51:26 crc kubenswrapper[4756]: I0930 19:51:26.760442 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.117619 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf36-account-create-6xdsk" event={"ID":"7d940278-5ba3-4ce6-996a-e4797a22e0d3","Type":"ContainerStarted","Data":"ef091cd54f04664527cd20ffacef796abb55906c385adb302c9728f615fa218a"} Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.117869 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf36-account-create-6xdsk" event={"ID":"7d940278-5ba3-4ce6-996a-e4797a22e0d3","Type":"ContainerStarted","Data":"bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770"} Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.152220 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-bf36-account-create-6xdsk" podStartSLOduration=2.152182501 podStartE2EDuration="2.152182501s" podCreationTimestamp="2025-09-30 19:51:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:27.145904907 +0000 UTC m=+1216.766838384" watchObservedRunningTime="2025-09-30 19:51:27.152182501 +0000 UTC m=+1216.773115978" Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.182335 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbf46aee-6848-4d8c-a589-ffe70c2387d4" path="/var/lib/kubelet/pods/fbf46aee-6848-4d8c-a589-ffe70c2387d4/volumes" Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.212426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerStarted","Data":"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb"} Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.212659 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.265198 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.744794133 podStartE2EDuration="5.265179845s" podCreationTimestamp="2025-09-30 19:51:22 +0000 UTC" firstStartedPulling="2025-09-30 19:51:23.192035993 +0000 UTC m=+1212.812969460" lastFinishedPulling="2025-09-30 19:51:26.712421695 +0000 UTC m=+1216.333355172" observedRunningTime="2025-09-30 19:51:27.237698076 +0000 UTC m=+1216.858631553" watchObservedRunningTime="2025-09-30 19:51:27.265179845 +0000 UTC m=+1216.886113322" Sep 30 19:51:27 crc kubenswrapper[4756]: I0930 19:51:27.282095 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/cinder-scheduler-0"] Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.238538 4756 generic.go:334] "Generic (PLEG): container finished" podID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerID="a0d0a2793ffe10ff819905e8fa8f947fb5983c9223ed4b9f710124623474411e" exitCode=0 Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.238624 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerDied","Data":"a0d0a2793ffe10ff819905e8fa8f947fb5983c9223ed4b9f710124623474411e"} Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.240876 4756 generic.go:334] "Generic (PLEG): container finished" podID="7d940278-5ba3-4ce6-996a-e4797a22e0d3" containerID="ef091cd54f04664527cd20ffacef796abb55906c385adb302c9728f615fa218a" exitCode=0 Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.240971 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf36-account-create-6xdsk" event={"ID":"7d940278-5ba3-4ce6-996a-e4797a22e0d3","Type":"ContainerDied","Data":"ef091cd54f04664527cd20ffacef796abb55906c385adb302c9728f615fa218a"} Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.244186 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerStarted","Data":"679931f313811a8019f531141ea2974a1523f54138f776e501dd4cc16431520e"} Sep 30 19:51:28 crc kubenswrapper[4756]: I0930 19:51:28.366837 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.234063 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.296770 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf36-account-create-6xdsk" event={"ID":"7d940278-5ba3-4ce6-996a-e4797a22e0d3","Type":"ContainerDied","Data":"bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770"} Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.296808 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb1a33c6712cacb859312cd4c9a89822001e683cb9020c8a43dd755b38110770" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.296874 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bf36-account-create-6xdsk" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.334014 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cwcp\" (UniqueName: \"kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp\") pod \"7d940278-5ba3-4ce6-996a-e4797a22e0d3\" (UID: \"7d940278-5ba3-4ce6-996a-e4797a22e0d3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.340545 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp" (OuterVolumeSpecName: "kube-api-access-7cwcp") pod "7d940278-5ba3-4ce6-996a-e4797a22e0d3" (UID: "7d940278-5ba3-4ce6-996a-e4797a22e0d3"). InnerVolumeSpecName "kube-api-access-7cwcp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.437983 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cwcp\" (UniqueName: \"kubernetes.io/projected/7d940278-5ba3-4ce6-996a-e4797a22e0d3-kube-api-access-7cwcp\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.578928 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.643087 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") pod \"a8513cfc-d18c-4884-b931-f0e8192734a3\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.643145 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs\") pod \"a8513cfc-d18c-4884-b931-f0e8192734a3\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.643184 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle\") pod \"a8513cfc-d18c-4884-b931-f0e8192734a3\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.643273 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js5tc\" (UniqueName: \"kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc\") pod \"a8513cfc-d18c-4884-b931-f0e8192734a3\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.643320 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config\") pod \"a8513cfc-d18c-4884-b931-f0e8192734a3\" (UID: \"a8513cfc-d18c-4884-b931-f0e8192734a3\") " Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.648383 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a8513cfc-d18c-4884-b931-f0e8192734a3" (UID: "a8513cfc-d18c-4884-b931-f0e8192734a3"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.649566 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc" (OuterVolumeSpecName: "kube-api-access-js5tc") pod "a8513cfc-d18c-4884-b931-f0e8192734a3" (UID: "a8513cfc-d18c-4884-b931-f0e8192734a3"). InnerVolumeSpecName "kube-api-access-js5tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.696194 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config" (OuterVolumeSpecName: "config") pod "a8513cfc-d18c-4884-b931-f0e8192734a3" (UID: "a8513cfc-d18c-4884-b931-f0e8192734a3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.698945 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8513cfc-d18c-4884-b931-f0e8192734a3" (UID: "a8513cfc-d18c-4884-b931-f0e8192734a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.718729 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a8513cfc-d18c-4884-b931-f0e8192734a3" (UID: "a8513cfc-d18c-4884-b931-f0e8192734a3"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.745275 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.745317 4756 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.745327 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.745336 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js5tc\" (UniqueName: \"kubernetes.io/projected/a8513cfc-d18c-4884-b931-f0e8192734a3-kube-api-access-js5tc\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:33 crc kubenswrapper[4756]: I0930 19:51:33.745345 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8513cfc-d18c-4884-b931-f0e8192734a3-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.322175 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" event={"ID":"dc949160-e62c-4982-bb20-8f3ebe1a2692","Type":"ContainerStarted","Data":"dede2f383ffabf28c795cf50f3401369dd333a7c95f685770c5342063991d80f"} Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.327643 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5647cd86c6-mmv8n" event={"ID":"a8513cfc-d18c-4884-b931-f0e8192734a3","Type":"ContainerDied","Data":"4e6b792543fa081046422ae031779435b360f00ddf74874cb3eca0045122a212"} Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.328023 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5647cd86c6-mmv8n" Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.329016 4756 scope.go:117] "RemoveContainer" containerID="192228cbe9f8707045ae7438487eea8771f1c6ef226112ba0726991cabd15ecf" Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.331305 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerStarted","Data":"3816f49e158334388e40eecbe485b69612c7263f206e5f76f6bf16f35cb72753"} Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.350036 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" podStartSLOduration=2.592912263 podStartE2EDuration="14.350015151s" podCreationTimestamp="2025-09-30 19:51:20 +0000 UTC" firstStartedPulling="2025-09-30 19:51:21.469594659 +0000 UTC m=+1211.090528136" lastFinishedPulling="2025-09-30 19:51:33.226697557 +0000 UTC m=+1222.847631024" observedRunningTime="2025-09-30 19:51:34.34118334 +0000 UTC m=+1223.962116827" watchObservedRunningTime="2025-09-30 19:51:34.350015151 +0000 UTC m=+1223.970948628" Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.378368 4756 scope.go:117] "RemoveContainer" containerID="a0d0a2793ffe10ff819905e8fa8f947fb5983c9223ed4b9f710124623474411e" Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.379695 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"] Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.388053 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5647cd86c6-mmv8n"] Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.744583 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.745309 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-central-agent" containerID="cri-o://6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f" gracePeriod=30 Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.745340 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="sg-core" containerID="cri-o://1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a" gracePeriod=30 Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.745355 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="proxy-httpd" containerID="cri-o://a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb" gracePeriod=30 Sep 30 19:51:34 crc kubenswrapper[4756]: I0930 19:51:34.745381 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-notification-agent" containerID="cri-o://561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433" gracePeriod=30 Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.133610 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" path="/var/lib/kubelet/pods/a8513cfc-d18c-4884-b931-f0e8192734a3/volumes" Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.342827 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerStarted","Data":"18b54d61471a434dd27220be8222ac734c16562af18d9cb5d87c937b4bbb70ab"} Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348100 4756 generic.go:334] "Generic (PLEG): container finished" podID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerID="a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb" exitCode=0 Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348127 4756 generic.go:334] "Generic (PLEG): container finished" podID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerID="1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a" exitCode=2 Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348135 4756 generic.go:334] "Generic (PLEG): container finished" podID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerID="6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f" exitCode=0 Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348182 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerDied","Data":"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb"} Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348236 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerDied","Data":"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a"} Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.348248 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerDied","Data":"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f"} Sep 30 19:51:35 crc kubenswrapper[4756]: I0930 19:51:35.370926 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=9.370899127 podStartE2EDuration="9.370899127s" podCreationTimestamp="2025-09-30 19:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:35.364937911 +0000 UTC m=+1224.985871428" watchObservedRunningTime="2025-09-30 19:51:35.370899127 +0000 UTC m=+1224.991832624" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.041708 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.088831 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089008 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089043 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089077 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089107 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hld6b\" (UniqueName: \"kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089135 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.089213 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle\") pod \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\" (UID: \"daa26414-a4d8-42de-b7d4-edfbd69eed0c\") " Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.090011 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.090263 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.094969 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b" (OuterVolumeSpecName: "kube-api-access-hld6b") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "kube-api-access-hld6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.098442 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts" (OuterVolumeSpecName: "scripts") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.135621 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.173278 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.190493 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data" (OuterVolumeSpecName: "config-data") pod "daa26414-a4d8-42de-b7d4-edfbd69eed0c" (UID: "daa26414-a4d8-42de-b7d4-edfbd69eed0c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.191984 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192078 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hld6b\" (UniqueName: \"kubernetes.io/projected/daa26414-a4d8-42de-b7d4-edfbd69eed0c-kube-api-access-hld6b\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192136 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192188 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192246 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192299 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daa26414-a4d8-42de-b7d4-edfbd69eed0c-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.192348 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daa26414-a4d8-42de-b7d4-edfbd69eed0c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.367374 4756 generic.go:334] "Generic (PLEG): container finished" podID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerID="561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433" exitCode=0 Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.367471 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerDied","Data":"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433"} Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.368890 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daa26414-a4d8-42de-b7d4-edfbd69eed0c","Type":"ContainerDied","Data":"d66c73c600327f65b4c7049a58afa2e3c7fa1de34d2c0eb3809c7392f83adc4d"} Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.367482 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.368956 4756 scope.go:117] "RemoveContainer" containerID="a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.398527 4756 scope.go:117] "RemoveContainer" containerID="1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.414100 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.431086 4756 scope.go:117] "RemoveContainer" containerID="561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.437316 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.453566 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454040 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-api" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454059 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-api" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454077 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="sg-core" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454086 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="sg-core" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454102 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="proxy-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454110 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="proxy-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454122 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-central-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454129 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-central-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454159 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454165 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454181 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-notification-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454188 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-notification-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.454202 4756 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7d940278-5ba3-4ce6-996a-e4797a22e0d3" containerName="mariadb-account-create" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454210 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d940278-5ba3-4ce6-996a-e4797a22e0d3" containerName="mariadb-account-create" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454492 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="proxy-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454508 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-notification-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454521 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="ceilometer-central-agent" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454539 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-api" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454556 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" containerName="sg-core" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454567 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8513cfc-d18c-4884-b931-f0e8192734a3" containerName="neutron-httpd" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.454583 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d940278-5ba3-4ce6-996a-e4797a22e0d3" containerName="mariadb-account-create" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.456609 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.457382 4756 scope.go:117] "RemoveContainer" containerID="6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.458925 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.458965 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.462425 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498110 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498168 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498223 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498247 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498268 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498306 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.498335 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc4gj\" (UniqueName: \"kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.510556 4756 scope.go:117] "RemoveContainer" containerID="a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 
19:51:36.511033 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb\": container with ID starting with a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb not found: ID does not exist" containerID="a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511061 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb"} err="failed to get container status \"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb\": rpc error: code = NotFound desc = could not find container \"a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb\": container with ID starting with a31df9120a149aff0634ebb6d651b894842af8148891f462650fa9fdf04039eb not found: ID does not exist" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511082 4756 scope.go:117] "RemoveContainer" containerID="1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.511313 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a\": container with ID starting with 1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a not found: ID does not exist" containerID="1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511334 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a"} err="failed to get container status \"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a\": rpc error: code = NotFound desc = could not find container \"1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a\": container with ID starting with 1a183ecc2a26d72661fef444c733d87d8efda09320750404d74d24e5714c276a not found: ID does not exist" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511347 4756 scope.go:117] "RemoveContainer" containerID="561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.511609 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433\": container with ID starting with 561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433 not found: ID does not exist" containerID="561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511626 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433"} err="failed to get container status \"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433\": rpc error: code = NotFound desc = could not find container \"561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433\": container with ID starting with 561c24e60d31ea657fca19a9beb10ebed4e214d758798e8cfdf2bff46b6d6433 not found: ID does not exist" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.511642 4756 
scope.go:117] "RemoveContainer" containerID="6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f" Sep 30 19:51:36 crc kubenswrapper[4756]: E0930 19:51:36.512043 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f\": container with ID starting with 6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f not found: ID does not exist" containerID="6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.512064 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f"} err="failed to get container status \"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f\": rpc error: code = NotFound desc = could not find container \"6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f\": container with ID starting with 6eccdd6f7d6d62c6439001026f97178e42ca381f7560fc8681acd980ba86c27f not found: ID does not exist" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600097 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600174 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600242 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600267 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600290 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600333 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600807 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd\") pod \"ceilometer-0\" (UID: 
\"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600872 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc4gj\" (UniqueName: \"kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.600869 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.603969 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.604688 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.604971 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.605165 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.618313 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc4gj\" (UniqueName: \"kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj\") pod \"ceilometer-0\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " pod="openstack/ceilometer-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.761866 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 19:51:36 crc kubenswrapper[4756]: I0930 19:51:36.805692 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:37 crc kubenswrapper[4756]: I0930 19:51:37.126913 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daa26414-a4d8-42de-b7d4-edfbd69eed0c" path="/var/lib/kubelet/pods/daa26414-a4d8-42de-b7d4-edfbd69eed0c/volumes" Sep 30 19:51:37 crc kubenswrapper[4756]: I0930 19:51:37.274011 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:37 crc kubenswrapper[4756]: I0930 19:51:37.382666 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerStarted","Data":"ef9fdcf86d8763d00ded8a7b1ffcb2a30c7206f7ced2d143a06cc3327d1461e4"} Sep 30 19:51:38 crc kubenswrapper[4756]: I0930 19:51:38.391848 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerStarted","Data":"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c"} Sep 30 19:51:40 crc kubenswrapper[4756]: I0930 19:51:40.409289 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerStarted","Data":"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8"} Sep 30 19:51:41 crc kubenswrapper[4756]: I0930 19:51:41.421829 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerStarted","Data":"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0"} Sep 30 19:51:41 crc kubenswrapper[4756]: I0930 19:51:41.503046 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:41 crc kubenswrapper[4756]: I0930 19:51:41.992692 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.431769 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerStarted","Data":"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd"} Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.433260 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-central-agent" containerID="cri-o://a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c" gracePeriod=30 Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.433408 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="sg-core" containerID="cri-o://5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0" gracePeriod=30 Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.433325 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="proxy-httpd" containerID="cri-o://2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd" gracePeriod=30 Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.433348 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0853004d-07b9-430c-aa43-794b3145867d" 
containerName="ceilometer-notification-agent" containerID="cri-o://84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8" gracePeriod=30 Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.433292 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:51:42 crc kubenswrapper[4756]: I0930 19:51:42.458487 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.879620964 podStartE2EDuration="6.458462645s" podCreationTimestamp="2025-09-30 19:51:36 +0000 UTC" firstStartedPulling="2025-09-30 19:51:37.296853761 +0000 UTC m=+1226.917787238" lastFinishedPulling="2025-09-30 19:51:41.875695442 +0000 UTC m=+1231.496628919" observedRunningTime="2025-09-30 19:51:42.451262977 +0000 UTC m=+1232.072196504" watchObservedRunningTime="2025-09-30 19:51:42.458462645 +0000 UTC m=+1232.079396122" Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468184 4756 generic.go:334] "Generic (PLEG): container finished" podID="0853004d-07b9-430c-aa43-794b3145867d" containerID="2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd" exitCode=0 Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468435 4756 generic.go:334] "Generic (PLEG): container finished" podID="0853004d-07b9-430c-aa43-794b3145867d" containerID="5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0" exitCode=2 Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468445 4756 generic.go:334] "Generic (PLEG): container finished" podID="0853004d-07b9-430c-aa43-794b3145867d" containerID="84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8" exitCode=0 Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468258 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerDied","Data":"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd"} Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468480 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerDied","Data":"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0"} Sep 30 19:51:43 crc kubenswrapper[4756]: I0930 19:51:43.468493 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerDied","Data":"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8"} Sep 30 19:51:44 crc kubenswrapper[4756]: I0930 19:51:44.481671 4756 generic.go:334] "Generic (PLEG): container finished" podID="dc949160-e62c-4982-bb20-8f3ebe1a2692" containerID="dede2f383ffabf28c795cf50f3401369dd333a7c95f685770c5342063991d80f" exitCode=0 Sep 30 19:51:44 crc kubenswrapper[4756]: I0930 19:51:44.481990 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" event={"ID":"dc949160-e62c-4982-bb20-8f3ebe1a2692","Type":"ContainerDied","Data":"dede2f383ffabf28c795cf50f3401369dd333a7c95f685770c5342063991d80f"} Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.206232 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.283840 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.283881 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.283913 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.283942 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.283971 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.284007 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.284047 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mc4gj\" (UniqueName: \"kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj\") pod \"0853004d-07b9-430c-aa43-794b3145867d\" (UID: \"0853004d-07b9-430c-aa43-794b3145867d\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.284632 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.285144 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.290075 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts" (OuterVolumeSpecName: "scripts") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.291753 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj" (OuterVolumeSpecName: "kube-api-access-mc4gj") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "kube-api-access-mc4gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.311055 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.375358 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385856 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385879 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0853004d-07b9-430c-aa43-794b3145867d-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385889 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385898 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385907 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.385915 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mc4gj\" (UniqueName: \"kubernetes.io/projected/0853004d-07b9-430c-aa43-794b3145867d-kube-api-access-mc4gj\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.398229 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data" (OuterVolumeSpecName: "config-data") pod "0853004d-07b9-430c-aa43-794b3145867d" (UID: "0853004d-07b9-430c-aa43-794b3145867d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.487348 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853004d-07b9-430c-aa43-794b3145867d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.495912 4756 generic.go:334] "Generic (PLEG): container finished" podID="0853004d-07b9-430c-aa43-794b3145867d" containerID="a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c" exitCode=0 Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.495995 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.496001 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerDied","Data":"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c"} Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.496076 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0853004d-07b9-430c-aa43-794b3145867d","Type":"ContainerDied","Data":"ef9fdcf86d8763d00ded8a7b1ffcb2a30c7206f7ced2d143a06cc3327d1461e4"} Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.496100 4756 scope.go:117] "RemoveContainer" containerID="2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.545252 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.561777 4756 scope.go:117] "RemoveContainer" containerID="5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.564212 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.594542 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.594982 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="proxy-httpd" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595002 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="proxy-httpd" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.595030 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="sg-core" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595039 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="sg-core" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.595053 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-notification-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595063 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0853004d-07b9-430c-aa43-794b3145867d" 
containerName="ceilometer-notification-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.595090 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-central-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595098 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-central-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595315 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-central-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595337 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="ceilometer-notification-agent" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595356 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="sg-core" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.595365 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0853004d-07b9-430c-aa43-794b3145867d" containerName="proxy-httpd" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.597344 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.599495 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.599738 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.606350 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.624093 4756 scope.go:117] "RemoveContainer" containerID="84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.645529 4756 scope.go:117] "RemoveContainer" containerID="a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.662447 4756 scope.go:117] "RemoveContainer" containerID="2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.662809 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd\": container with ID starting with 2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd not found: ID does not exist" containerID="2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.662846 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd"} err="failed to get container status \"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd\": rpc error: code = NotFound desc = could not find container \"2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd\": container with ID starting with 2f018f3d4686605a6052467ec031e85a05d28aa63a4b9734405a184ad84e1fdd not found: ID does not exist" Sep 30 19:51:45 crc 
kubenswrapper[4756]: I0930 19:51:45.662871 4756 scope.go:117] "RemoveContainer" containerID="5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.663105 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0\": container with ID starting with 5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0 not found: ID does not exist" containerID="5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.663129 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0"} err="failed to get container status \"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0\": rpc error: code = NotFound desc = could not find container \"5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0\": container with ID starting with 5ccc6b1b8aec471d7421b417c7dc2d01cf249a7a473bdf2ac6e83543e6d2d8d0 not found: ID does not exist" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.663149 4756 scope.go:117] "RemoveContainer" containerID="84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.663549 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8\": container with ID starting with 84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8 not found: ID does not exist" containerID="84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.663599 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8"} err="failed to get container status \"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8\": rpc error: code = NotFound desc = could not find container \"84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8\": container with ID starting with 84ebf4268b8a711bacedf6d262db750fcb672911d679603e834f5b192f9b3bb8 not found: ID does not exist" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.663633 4756 scope.go:117] "RemoveContainer" containerID="a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c" Sep 30 19:51:45 crc kubenswrapper[4756]: E0930 19:51:45.663898 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c\": container with ID starting with a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c not found: ID does not exist" containerID="a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.663923 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c"} err="failed to get container status \"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c\": rpc error: code = NotFound desc = could not find container 
\"a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c\": container with ID starting with a59da2069cd457a45cfa4cee32dd5b3e2f6041f6c86bc1d8907d2c2ea621f64c not found: ID does not exist" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.690872 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691141 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22ztd\" (UniqueName: \"kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691162 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691190 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691212 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691330 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.691360 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.773974 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796044 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796100 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796177 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796201 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22ztd\" (UniqueName: \"kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796221 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796248 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.796268 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.799965 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.800005 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.802013 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " 
pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.802614 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.802636 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.807827 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.821705 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22ztd\" (UniqueName: \"kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd\") pod \"ceilometer-0\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.897664 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7shw\" (UniqueName: \"kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw\") pod \"dc949160-e62c-4982-bb20-8f3ebe1a2692\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.897715 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle\") pod \"dc949160-e62c-4982-bb20-8f3ebe1a2692\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.897738 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data\") pod \"dc949160-e62c-4982-bb20-8f3ebe1a2692\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.897788 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts\") pod \"dc949160-e62c-4982-bb20-8f3ebe1a2692\" (UID: \"dc949160-e62c-4982-bb20-8f3ebe1a2692\") " Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.901162 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts" (OuterVolumeSpecName: "scripts") pod "dc949160-e62c-4982-bb20-8f3ebe1a2692" (UID: "dc949160-e62c-4982-bb20-8f3ebe1a2692"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.902533 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw" (OuterVolumeSpecName: "kube-api-access-h7shw") pod "dc949160-e62c-4982-bb20-8f3ebe1a2692" (UID: "dc949160-e62c-4982-bb20-8f3ebe1a2692"). InnerVolumeSpecName "kube-api-access-h7shw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.922247 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc949160-e62c-4982-bb20-8f3ebe1a2692" (UID: "dc949160-e62c-4982-bb20-8f3ebe1a2692"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.931051 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:51:45 crc kubenswrapper[4756]: I0930 19:51:45.931178 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data" (OuterVolumeSpecName: "config-data") pod "dc949160-e62c-4982-bb20-8f3ebe1a2692" (UID: "dc949160-e62c-4982-bb20-8f3ebe1a2692"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.001413 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7shw\" (UniqueName: \"kubernetes.io/projected/dc949160-e62c-4982-bb20-8f3ebe1a2692-kube-api-access-h7shw\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.001447 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.001459 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.001472 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc949160-e62c-4982-bb20-8f3ebe1a2692-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.384679 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:51:46 crc kubenswrapper[4756]: W0930 19:51:46.386593 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30b7dced_f630_4b43_86a9_98bd2b088439.slice/crio-91b11ae1cdbe96247ea0cb0984578ba4c52b8dc70938083982a5a8ea9f1babaf WatchSource:0}: Error finding container 91b11ae1cdbe96247ea0cb0984578ba4c52b8dc70938083982a5a8ea9f1babaf: Status 404 returned error can't find the container with id 91b11ae1cdbe96247ea0cb0984578ba4c52b8dc70938083982a5a8ea9f1babaf Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.513707 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerStarted","Data":"91b11ae1cdbe96247ea0cb0984578ba4c52b8dc70938083982a5a8ea9f1babaf"} Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.516214 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" event={"ID":"dc949160-e62c-4982-bb20-8f3ebe1a2692","Type":"ContainerDied","Data":"625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184"} Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.516245 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="625f40fc53bfa3c84eeac0fdec514e0c4229811e74ee1078dcbe54c8bdde9184" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.516307 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mzmtl" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.602763 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:51:46 crc kubenswrapper[4756]: E0930 19:51:46.603225 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc949160-e62c-4982-bb20-8f3ebe1a2692" containerName="nova-cell0-conductor-db-sync" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.603252 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc949160-e62c-4982-bb20-8f3ebe1a2692" containerName="nova-cell0-conductor-db-sync" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.603515 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc949160-e62c-4982-bb20-8f3ebe1a2692" containerName="nova-cell0-conductor-db-sync" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.604254 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.606651 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zj27j" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.607810 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.614532 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.712363 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.712453 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.712852 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbdwg\" (UniqueName: \"kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.814329 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.814417 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.814500 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbdwg\" (UniqueName: \"kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.818988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.819474 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.837881 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbdwg\" (UniqueName: \"kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg\") pod \"nova-cell0-conductor-0\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:46 crc kubenswrapper[4756]: I0930 19:51:46.928998 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:47 crc kubenswrapper[4756]: I0930 19:51:47.131700 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0853004d-07b9-430c-aa43-794b3145867d" path="/var/lib/kubelet/pods/0853004d-07b9-430c-aa43-794b3145867d/volumes" Sep 30 19:51:47 crc kubenswrapper[4756]: I0930 19:51:47.410485 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:51:47 crc kubenswrapper[4756]: I0930 19:51:47.541068 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f9ab0288-ae1d-41f7-954d-8cbadc83c614","Type":"ContainerStarted","Data":"89dbaff619c61b1a5dff07426f19a3166a124e9784c0732d4dac53a456b0ba9f"} Sep 30 19:51:47 crc kubenswrapper[4756]: I0930 19:51:47.545445 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerStarted","Data":"e10134091ad8f357225c10adfda68a2ab219d55cd7368080db1d4a8f0fff1ee0"} Sep 30 19:51:48 crc kubenswrapper[4756]: I0930 19:51:48.583990 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f9ab0288-ae1d-41f7-954d-8cbadc83c614","Type":"ContainerStarted","Data":"8dbe31d5b2222309a75d567a85830fd5e0fbe8a178b7a9f99d3211c5bbf82f1c"} Sep 30 19:51:48 crc kubenswrapper[4756]: I0930 19:51:48.584724 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:48 crc kubenswrapper[4756]: I0930 19:51:48.588172 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerStarted","Data":"93e8c8d3de362c85843aad668e360a25cafef168d6622ddf81257ab607664687"} Sep 30 19:51:48 crc kubenswrapper[4756]: I0930 19:51:48.588215 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerStarted","Data":"ceb05212adf346fea2a2787f9b9ac55ea78085f3bc87fcd82d78a2b51857d9d4"} Sep 30 19:51:48 crc kubenswrapper[4756]: I0930 19:51:48.603665 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.603646829 podStartE2EDuration="2.603646829s" podCreationTimestamp="2025-09-30 19:51:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:48.597808497 +0000 UTC m=+1238.218741984" watchObservedRunningTime="2025-09-30 19:51:48.603646829 +0000 UTC m=+1238.224580316" Sep 30 19:51:50 crc kubenswrapper[4756]: I0930 19:51:50.615898 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerStarted","Data":"90d54e75cb49503bf84d5646fd4363355d121fa689ccb8d1d366765f5fde71a9"} Sep 30 19:51:50 crc kubenswrapper[4756]: I0930 19:51:50.617461 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:51:50 crc kubenswrapper[4756]: I0930 19:51:50.657959 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.365660228 podStartE2EDuration="5.657931248s" podCreationTimestamp="2025-09-30 19:51:45 +0000 UTC" firstStartedPulling="2025-09-30 19:51:46.390039606 +0000 UTC m=+1236.010973083" lastFinishedPulling="2025-09-30 19:51:49.682310616 +0000 UTC m=+1239.303244103" observedRunningTime="2025-09-30 19:51:50.650046632 +0000 UTC m=+1240.270980119" watchObservedRunningTime="2025-09-30 19:51:50.657931248 +0000 UTC m=+1240.278864725" Sep 30 19:51:56 crc kubenswrapper[4756]: I0930 19:51:56.977588 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.523050 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-4kg8f"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.524731 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.528478 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.528554 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.552625 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-4kg8f"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.641966 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdtkk\" (UniqueName: \"kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.642029 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.642070 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.642110 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " 
pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.702234 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.703926 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.718447 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.725730 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.732908 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.734387 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.740193 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.743751 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdtkk\" (UniqueName: \"kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.743810 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.743849 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.743887 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.752473 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.756880 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.763317 4756 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.764898 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.768788 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdtkk\" (UniqueName: \"kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk\") pod \"nova-cell0-cell-mapping-4kg8f\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.828467 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.829840 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.839035 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.841057 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.846912 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847167 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847249 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847349 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847441 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847560 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fzsn\" (UniqueName: 
\"kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.847667 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z567h\" (UniqueName: \"kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.854648 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.949987 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950250 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950277 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950299 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950343 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950361 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950386 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fzsn\" (UniqueName: \"kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950430 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950457 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z567h\" (UniqueName: \"kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950480 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5cl7\" (UniqueName: \"kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.950531 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.953796 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.958272 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.959449 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.970643 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.981063 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:57 crc kubenswrapper[4756]: I0930 19:51:57.993883 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.002251 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.002805 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.002965 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.018278 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z567h\" (UniqueName: \"kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h\") pod \"nova-api-0\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " pod="openstack/nova-api-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.027471 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.028313 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fzsn\" (UniqueName: \"kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn\") pod \"nova-scheduler-0\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " pod="openstack/nova-scheduler-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.052782 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5cl7\" (UniqueName: \"kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.052845 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62kq8\" (UniqueName: \"kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.052935 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.052953 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.052969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.053001 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.053021 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.053366 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.059110 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.060435 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.081087 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.098264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5cl7\" (UniqueName: \"kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7\") pod \"nova-metadata-0\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.098329 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.099787 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.112125 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.155637 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.156776 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62kq8\" (UniqueName: \"kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.156864 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.156898 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.161608 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.179715 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.203731 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62kq8\" (UniqueName: \"kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8\") pod \"nova-cell1-novncproxy-0\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.258665 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.259857 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.259995 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4mnf\" (UniqueName: \"kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.260101 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.261659 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.261687 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363429 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363497 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-r4mnf\" (UniqueName: \"kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363541 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363626 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363643 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.363664 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.364440 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.367166 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.367229 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.367295 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.369624 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.382886 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4mnf\" (UniqueName: \"kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf\") pod \"dnsmasq-dns-845d6d6f59-hcjll\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.496013 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.528238 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.660268 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.669039 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.695550 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-4kg8f"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.710289 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4kg8f" event={"ID":"99d41b4c-92d7-43f2-b6d1-044876e9a8ea","Type":"ContainerStarted","Data":"e93e42e62035bd84910ae47bc6b39861a04d85953ce6a005e9fbc027a0f3ad00"} Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.712708 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerStarted","Data":"e95b33a101a9bc7a1f527715026390d8c0020465b07487c1b8d8bdf716f06531"} Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.714378 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3f1511a1-de77-42e5-8fda-cfb60a6c002b","Type":"ContainerStarted","Data":"3b124727739b11c5a99e00c1dd532c40bdf1b75601729b14a90173731570d343"} Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.818326 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4blkh"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.819767 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.822010 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.824203 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.876611 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlhdv\" (UniqueName: \"kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.876648 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.876678 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.876692 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.898155 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4blkh"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.934348 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.978974 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlhdv\" (UniqueName: \"kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.979352 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.979385 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: 
\"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.979427 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.993890 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.994092 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.996676 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:58 crc kubenswrapper[4756]: I0930 19:51:58.997933 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlhdv\" (UniqueName: \"kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv\") pod \"nova-cell1-conductor-db-sync-4blkh\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.055334 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.220812 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.224007 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:51:59 crc kubenswrapper[4756]: W0930 19:51:59.227988 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5585066a_57c7_4fd5_91b8_36af88ee6f9b.slice/crio-13bb3aaf91fa25515a5696804af9016bfa8df63cc25dc0d6418f53f2d5bd323b WatchSource:0}: Error finding container 13bb3aaf91fa25515a5696804af9016bfa8df63cc25dc0d6418f53f2d5bd323b: Status 404 returned error can't find the container with id 13bb3aaf91fa25515a5696804af9016bfa8df63cc25dc0d6418f53f2d5bd323b Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.677501 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4blkh"] Sep 30 19:51:59 crc kubenswrapper[4756]: W0930 19:51:59.691211 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec7d0c64_c002_496a_b7ef_727f7d4b4458.slice/crio-6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879 WatchSource:0}: Error finding container 6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879: Status 404 returned error can't find the container with id 6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879 Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.729016 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerStarted","Data":"acc9c9c734195425d5066964ee74bab73a901ff06f38eddd92c392244aac22d0"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.730917 4756 generic.go:334] "Generic (PLEG): container finished" podID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerID="67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022" exitCode=0 Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.730995 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" event={"ID":"5585066a-57c7-4fd5-91b8-36af88ee6f9b","Type":"ContainerDied","Data":"67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.731021 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" event={"ID":"5585066a-57c7-4fd5-91b8-36af88ee6f9b","Type":"ContainerStarted","Data":"13bb3aaf91fa25515a5696804af9016bfa8df63cc25dc0d6418f53f2d5bd323b"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.735969 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4kg8f" event={"ID":"99d41b4c-92d7-43f2-b6d1-044876e9a8ea","Type":"ContainerStarted","Data":"c873acaf2a9f5921ab2fc0924a478163d49e82779ad8a5ee008a11ab0463e3a2"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.743020 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"df33b9b5-354b-4431-aecc-aef93a455a02","Type":"ContainerStarted","Data":"73c5c72bc980e19c32a571eb30d2b228b743844e6ff710bf543634d9cb05bdaa"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.744889 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4blkh" 
event={"ID":"ec7d0c64-c002-496a-b7ef-727f7d4b4458","Type":"ContainerStarted","Data":"6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879"} Sep 30 19:51:59 crc kubenswrapper[4756]: I0930 19:51:59.782544 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-4kg8f" podStartSLOduration=2.782525246 podStartE2EDuration="2.782525246s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:59.774713911 +0000 UTC m=+1249.395647378" watchObservedRunningTime="2025-09-30 19:51:59.782525246 +0000 UTC m=+1249.403458723" Sep 30 19:52:00 crc kubenswrapper[4756]: I0930 19:52:00.758782 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4blkh" event={"ID":"ec7d0c64-c002-496a-b7ef-727f7d4b4458","Type":"ContainerStarted","Data":"e3de5d8e8ddf6f0cd0c0368c9ca27659f3f2da42af7357ba2cc6cf72a80b007a"} Sep 30 19:52:00 crc kubenswrapper[4756]: I0930 19:52:00.763847 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" event={"ID":"5585066a-57c7-4fd5-91b8-36af88ee6f9b","Type":"ContainerStarted","Data":"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c"} Sep 30 19:52:00 crc kubenswrapper[4756]: I0930 19:52:00.764058 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:52:00 crc kubenswrapper[4756]: I0930 19:52:00.782023 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-4blkh" podStartSLOduration=2.782004452 podStartE2EDuration="2.782004452s" podCreationTimestamp="2025-09-30 19:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:00.774462235 +0000 UTC m=+1250.395395722" watchObservedRunningTime="2025-09-30 19:52:00.782004452 +0000 UTC m=+1250.402937929" Sep 30 19:52:00 crc kubenswrapper[4756]: I0930 19:52:00.798806 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" podStartSLOduration=3.798787741 podStartE2EDuration="3.798787741s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:00.791164382 +0000 UTC m=+1250.412097859" watchObservedRunningTime="2025-09-30 19:52:00.798787741 +0000 UTC m=+1250.419721208" Sep 30 19:52:01 crc kubenswrapper[4756]: I0930 19:52:01.296770 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:01 crc kubenswrapper[4756]: I0930 19:52:01.311893 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.798054 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"df33b9b5-354b-4431-aecc-aef93a455a02","Type":"ContainerStarted","Data":"6642d2ccfdc3501ceb1f771a6ea2bd1b3170a7e579b9cc7e8dc8a0a13881303a"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.798333 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="df33b9b5-354b-4431-aecc-aef93a455a02" 
containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6642d2ccfdc3501ceb1f771a6ea2bd1b3170a7e579b9cc7e8dc8a0a13881303a" gracePeriod=30 Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.806052 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerStarted","Data":"cc833a452e5eb7d11d7eb0502e0bd5e4fb9085857c830eaf9182bddd7e675953"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.806098 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerStarted","Data":"dd34ebee6ba535fafab2697edd599929e443e0b1d71841eeddabdc0a54f03aaa"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.808850 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3f1511a1-de77-42e5-8fda-cfb60a6c002b","Type":"ContainerStarted","Data":"733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.815800 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerStarted","Data":"f7d5ee1721d495453ac73162ff8b96b6a3b3f3d4c350acafb440165e3ddc9dcb"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.815854 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerStarted","Data":"ba6569afe758498355cd9682bc8fca893079965c6280528e59fd48ecadc1495f"} Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.816062 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-log" containerID="cri-o://ba6569afe758498355cd9682bc8fca893079965c6280528e59fd48ecadc1495f" gracePeriod=30 Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.816207 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-metadata" containerID="cri-o://f7d5ee1721d495453ac73162ff8b96b6a3b3f3d4c350acafb440165e3ddc9dcb" gracePeriod=30 Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.819139 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.822959453 podStartE2EDuration="5.819116432s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="2025-09-30 19:51:59.054174096 +0000 UTC m=+1248.675107573" lastFinishedPulling="2025-09-30 19:52:02.050331065 +0000 UTC m=+1251.671264552" observedRunningTime="2025-09-30 19:52:02.817101969 +0000 UTC m=+1252.438035466" watchObservedRunningTime="2025-09-30 19:52:02.819116432 +0000 UTC m=+1252.440049909" Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.841680 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.663636567 podStartE2EDuration="5.84011266s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="2025-09-30 19:51:58.877036136 +0000 UTC m=+1248.497969613" lastFinishedPulling="2025-09-30 19:52:02.053512209 +0000 UTC m=+1251.674445706" observedRunningTime="2025-09-30 19:52:02.837204894 +0000 UTC m=+1252.458138381" watchObservedRunningTime="2025-09-30 
19:52:02.84011266 +0000 UTC m=+1252.461046137" Sep 30 19:52:02 crc kubenswrapper[4756]: I0930 19:52:02.857722 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.463037894 podStartE2EDuration="5.85770481s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="2025-09-30 19:51:58.661533293 +0000 UTC m=+1248.282466760" lastFinishedPulling="2025-09-30 19:52:02.056200199 +0000 UTC m=+1251.677133676" observedRunningTime="2025-09-30 19:52:02.851128848 +0000 UTC m=+1252.472062335" watchObservedRunningTime="2025-09-30 19:52:02.85770481 +0000 UTC m=+1252.478638287" Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.059963 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.156431 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.156473 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.496372 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.828275 4756 generic.go:334] "Generic (PLEG): container finished" podID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerID="ba6569afe758498355cd9682bc8fca893079965c6280528e59fd48ecadc1495f" exitCode=143 Sep 30 19:52:03 crc kubenswrapper[4756]: I0930 19:52:03.828378 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerDied","Data":"ba6569afe758498355cd9682bc8fca893079965c6280528e59fd48ecadc1495f"} Sep 30 19:52:06 crc kubenswrapper[4756]: I0930 19:52:06.862698 4756 generic.go:334] "Generic (PLEG): container finished" podID="99d41b4c-92d7-43f2-b6d1-044876e9a8ea" containerID="c873acaf2a9f5921ab2fc0924a478163d49e82779ad8a5ee008a11ab0463e3a2" exitCode=0 Sep 30 19:52:06 crc kubenswrapper[4756]: I0930 19:52:06.862757 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4kg8f" event={"ID":"99d41b4c-92d7-43f2-b6d1-044876e9a8ea","Type":"ContainerDied","Data":"c873acaf2a9f5921ab2fc0924a478163d49e82779ad8a5ee008a11ab0463e3a2"} Sep 30 19:52:06 crc kubenswrapper[4756]: I0930 19:52:06.866065 4756 generic.go:334] "Generic (PLEG): container finished" podID="ec7d0c64-c002-496a-b7ef-727f7d4b4458" containerID="e3de5d8e8ddf6f0cd0c0368c9ca27659f3f2da42af7357ba2cc6cf72a80b007a" exitCode=0 Sep 30 19:52:06 crc kubenswrapper[4756]: I0930 19:52:06.866116 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4blkh" event={"ID":"ec7d0c64-c002-496a-b7ef-727f7d4b4458","Type":"ContainerDied","Data":"e3de5d8e8ddf6f0cd0c0368c9ca27659f3f2da42af7357ba2cc6cf72a80b007a"} Sep 30 19:52:06 crc kubenswrapper[4756]: I0930 19:52:06.895063 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=6.520022123 podStartE2EDuration="9.895036236s" podCreationTimestamp="2025-09-30 19:51:57 +0000 UTC" firstStartedPulling="2025-09-30 19:51:58.673117275 +0000 UTC m=+1248.294050752" lastFinishedPulling="2025-09-30 19:52:02.048131388 +0000 UTC m=+1251.669064865" observedRunningTime="2025-09-30 19:52:02.877510988 +0000 UTC m=+1252.498444465" 
watchObservedRunningTime="2025-09-30 19:52:06.895036236 +0000 UTC m=+1256.515969733" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.028042 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.028564 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.059968 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.094235 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.382185 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.387560 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.481985 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdtkk\" (UniqueName: \"kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk\") pod \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482049 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlhdv\" (UniqueName: \"kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv\") pod \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482075 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data\") pod \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482131 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle\") pod \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482212 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data\") pod \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482249 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle\") pod \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482313 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts\") pod 
\"ec7d0c64-c002-496a-b7ef-727f7d4b4458\" (UID: \"ec7d0c64-c002-496a-b7ef-727f7d4b4458\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.482364 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts\") pod \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\" (UID: \"99d41b4c-92d7-43f2-b6d1-044876e9a8ea\") " Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.488703 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts" (OuterVolumeSpecName: "scripts") pod "ec7d0c64-c002-496a-b7ef-727f7d4b4458" (UID: "ec7d0c64-c002-496a-b7ef-727f7d4b4458"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.491488 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts" (OuterVolumeSpecName: "scripts") pod "99d41b4c-92d7-43f2-b6d1-044876e9a8ea" (UID: "99d41b4c-92d7-43f2-b6d1-044876e9a8ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.493128 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk" (OuterVolumeSpecName: "kube-api-access-vdtkk") pod "99d41b4c-92d7-43f2-b6d1-044876e9a8ea" (UID: "99d41b4c-92d7-43f2-b6d1-044876e9a8ea"). InnerVolumeSpecName "kube-api-access-vdtkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.493571 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv" (OuterVolumeSpecName: "kube-api-access-tlhdv") pod "ec7d0c64-c002-496a-b7ef-727f7d4b4458" (UID: "ec7d0c64-c002-496a-b7ef-727f7d4b4458"). InnerVolumeSpecName "kube-api-access-tlhdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.513362 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99d41b4c-92d7-43f2-b6d1-044876e9a8ea" (UID: "99d41b4c-92d7-43f2-b6d1-044876e9a8ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.518909 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data" (OuterVolumeSpecName: "config-data") pod "99d41b4c-92d7-43f2-b6d1-044876e9a8ea" (UID: "99d41b4c-92d7-43f2-b6d1-044876e9a8ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.520599 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data" (OuterVolumeSpecName: "config-data") pod "ec7d0c64-c002-496a-b7ef-727f7d4b4458" (UID: "ec7d0c64-c002-496a-b7ef-727f7d4b4458"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.529599 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.546591 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec7d0c64-c002-496a-b7ef-727f7d4b4458" (UID: "ec7d0c64-c002-496a-b7ef-727f7d4b4458"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590737 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590776 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590786 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec7d0c64-c002-496a-b7ef-727f7d4b4458-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590801 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590809 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdtkk\" (UniqueName: \"kubernetes.io/projected/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-kube-api-access-vdtkk\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590819 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlhdv\" (UniqueName: \"kubernetes.io/projected/ec7d0c64-c002-496a-b7ef-727f7d4b4458-kube-api-access-tlhdv\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590827 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.590838 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99d41b4c-92d7-43f2-b6d1-044876e9a8ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.597529 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"] Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.597791 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="dnsmasq-dns" containerID="cri-o://aa920ad2cb5c3c09829c17c32c5930c36c12bdb5b70db6d64f0c0b6f86627d08" gracePeriod=10 Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.893721 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4blkh" 
event={"ID":"ec7d0c64-c002-496a-b7ef-727f7d4b4458","Type":"ContainerDied","Data":"6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879"} Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.894026 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6814707a40745a523b517f72f192d7e7edac5edfa73ee3bd4970418396b77879" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.894077 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4blkh" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.900416 4756 generic.go:334] "Generic (PLEG): container finished" podID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerID="aa920ad2cb5c3c09829c17c32c5930c36c12bdb5b70db6d64f0c0b6f86627d08" exitCode=0 Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.900479 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" event={"ID":"78d7d65c-1041-4ec8-97df-885ea20328ea","Type":"ContainerDied","Data":"aa920ad2cb5c3c09829c17c32c5930c36c12bdb5b70db6d64f0c0b6f86627d08"} Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.902075 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4kg8f" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.902418 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4kg8f" event={"ID":"99d41b4c-92d7-43f2-b6d1-044876e9a8ea","Type":"ContainerDied","Data":"e93e42e62035bd84910ae47bc6b39861a04d85953ce6a005e9fbc027a0f3ad00"} Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.902441 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e93e42e62035bd84910ae47bc6b39861a04d85953ce6a005e9fbc027a0f3ad00" Sep 30 19:52:08 crc kubenswrapper[4756]: I0930 19:52:08.964465 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.005448 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:52:09 crc kubenswrapper[4756]: E0930 19:52:09.007233 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec7d0c64-c002-496a-b7ef-727f7d4b4458" containerName="nova-cell1-conductor-db-sync" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.007249 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec7d0c64-c002-496a-b7ef-727f7d4b4458" containerName="nova-cell1-conductor-db-sync" Sep 30 19:52:09 crc kubenswrapper[4756]: E0930 19:52:09.007302 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d41b4c-92d7-43f2-b6d1-044876e9a8ea" containerName="nova-manage" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.007309 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d41b4c-92d7-43f2-b6d1-044876e9a8ea" containerName="nova-manage" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.008608 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec7d0c64-c002-496a-b7ef-727f7d4b4458" containerName="nova-cell1-conductor-db-sync" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.008638 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d41b4c-92d7-43f2-b6d1-044876e9a8ea" containerName="nova-manage" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.016731 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.020059 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.051688 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.086202 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.112979 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.113346 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.117928 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.118040 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s65d9\" (UniqueName: \"kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.118075 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.141902 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.142171 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-log" containerID="cri-o://dd34ebee6ba535fafab2697edd599929e443e0b1d71841eeddabdc0a54f03aaa" gracePeriod=30 Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.142324 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-api" containerID="cri-o://cc833a452e5eb7d11d7eb0502e0bd5e4fb9085857c830eaf9182bddd7e675953" gracePeriod=30 Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.220992 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221084 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221146 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221205 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99v94\" (UniqueName: \"kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221254 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221305 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0\") pod \"78d7d65c-1041-4ec8-97df-885ea20328ea\" (UID: \"78d7d65c-1041-4ec8-97df-885ea20328ea\") " Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221752 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221821 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s65d9\" (UniqueName: \"kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.221844 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.226444 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.227006 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.227579 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94" (OuterVolumeSpecName: "kube-api-access-99v94") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "kube-api-access-99v94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.260050 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s65d9\" (UniqueName: \"kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9\") pod \"nova-cell1-conductor-0\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.302685 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.307246 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.311349 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.324187 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.324223 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99v94\" (UniqueName: \"kubernetes.io/projected/78d7d65c-1041-4ec8-97df-885ea20328ea-kube-api-access-99v94\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.324235 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.324243 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.326284 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.344930 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config" (OuterVolumeSpecName: "config") pod "78d7d65c-1041-4ec8-97df-885ea20328ea" (UID: "78d7d65c-1041-4ec8-97df-885ea20328ea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.378682 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.426134 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.426644 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78d7d65c-1041-4ec8-97df-885ea20328ea-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.616189 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.919147 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" event={"ID":"78d7d65c-1041-4ec8-97df-885ea20328ea","Type":"ContainerDied","Data":"263e4d83488c467b9ab1a780cf4d661166e4185a0fb7d5a2e745febf930183a2"} Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.919205 4756 scope.go:117] "RemoveContainer" containerID="aa920ad2cb5c3c09829c17c32c5930c36c12bdb5b70db6d64f0c0b6f86627d08" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.920157 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-4qwd4" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.940687 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.942163 4756 generic.go:334] "Generic (PLEG): container finished" podID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerID="dd34ebee6ba535fafab2697edd599929e443e0b1d71841eeddabdc0a54f03aaa" exitCode=143 Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.942451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerDied","Data":"dd34ebee6ba535fafab2697edd599929e443e0b1d71841eeddabdc0a54f03aaa"} Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.987579 4756 scope.go:117] "RemoveContainer" containerID="b8b76eb8b3e1eda91f6d68df720fb41c71f2d82cfa6e3420643684942b12a26b" Sep 30 19:52:09 crc kubenswrapper[4756]: I0930 19:52:09.991326 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"] Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.001845 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-4qwd4"] Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.957168 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"164b639f-c027-4486-a298-06e9510e14ac","Type":"ContainerStarted","Data":"1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497"} Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.959233 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"164b639f-c027-4486-a298-06e9510e14ac","Type":"ContainerStarted","Data":"726f5baf0d60f8fe7ae861fd6090b19269009f3509e3a690a1594c710c9cc203"} Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.959367 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.958256 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerName="nova-scheduler-scheduler" containerID="cri-o://733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" gracePeriod=30 Sep 30 19:52:10 crc kubenswrapper[4756]: I0930 19:52:10.974933 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.974905093 podStartE2EDuration="2.974905093s" podCreationTimestamp="2025-09-30 19:52:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:10.971978287 +0000 UTC m=+1260.592911764" watchObservedRunningTime="2025-09-30 19:52:10.974905093 +0000 UTC m=+1260.595838580" Sep 30 19:52:11 crc kubenswrapper[4756]: I0930 19:52:11.137944 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" path="/var/lib/kubelet/pods/78d7d65c-1041-4ec8-97df-885ea20328ea/volumes" Sep 30 19:52:13 crc kubenswrapper[4756]: E0930 19:52:13.066867 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:13 crc kubenswrapper[4756]: E0930 19:52:13.069538 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:13 crc kubenswrapper[4756]: E0930 19:52:13.070990 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:13 crc kubenswrapper[4756]: E0930 19:52:13.071038 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerName="nova-scheduler-scheduler" Sep 30 19:52:13 crc kubenswrapper[4756]: I0930 19:52:13.992478 4756 generic.go:334] "Generic (PLEG): container finished" podID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerID="733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" exitCode=0 Sep 30 19:52:13 crc kubenswrapper[4756]: I0930 19:52:13.992618 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3f1511a1-de77-42e5-8fda-cfb60a6c002b","Type":"ContainerDied","Data":"733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506"} Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.321214 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.439301 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle\") pod \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.439438 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fzsn\" (UniqueName: \"kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn\") pod \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.439481 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data\") pod \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\" (UID: \"3f1511a1-de77-42e5-8fda-cfb60a6c002b\") " Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.444749 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn" (OuterVolumeSpecName: "kube-api-access-7fzsn") pod "3f1511a1-de77-42e5-8fda-cfb60a6c002b" (UID: "3f1511a1-de77-42e5-8fda-cfb60a6c002b"). InnerVolumeSpecName "kube-api-access-7fzsn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.484924 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data" (OuterVolumeSpecName: "config-data") pod "3f1511a1-de77-42e5-8fda-cfb60a6c002b" (UID: "3f1511a1-de77-42e5-8fda-cfb60a6c002b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.488554 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f1511a1-de77-42e5-8fda-cfb60a6c002b" (UID: "3f1511a1-de77-42e5-8fda-cfb60a6c002b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.542260 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.542294 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fzsn\" (UniqueName: \"kubernetes.io/projected/3f1511a1-de77-42e5-8fda-cfb60a6c002b-kube-api-access-7fzsn\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:14 crc kubenswrapper[4756]: I0930 19:52:14.542309 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f1511a1-de77-42e5-8fda-cfb60a6c002b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.002787 4756 generic.go:334] "Generic (PLEG): container finished" podID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerID="cc833a452e5eb7d11d7eb0502e0bd5e4fb9085857c830eaf9182bddd7e675953" exitCode=0 Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.002855 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerDied","Data":"cc833a452e5eb7d11d7eb0502e0bd5e4fb9085857c830eaf9182bddd7e675953"} Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.002889 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04ff8360-3224-42bf-bfac-1d4725acd2ae","Type":"ContainerDied","Data":"e95b33a101a9bc7a1f527715026390d8c0020465b07487c1b8d8bdf716f06531"} Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.002902 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e95b33a101a9bc7a1f527715026390d8c0020465b07487c1b8d8bdf716f06531" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.004722 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3f1511a1-de77-42e5-8fda-cfb60a6c002b","Type":"ContainerDied","Data":"3b124727739b11c5a99e00c1dd532c40bdf1b75601729b14a90173731570d343"} Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.004767 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.004778 4756 scope.go:117] "RemoveContainer" containerID="733f586ad39f43fe7c1a787ed859079c7d8784b952f555b97c4c61757af4f506" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.050729 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.080892 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.108898 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.131950 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" path="/var/lib/kubelet/pods/3f1511a1-de77-42e5-8fda-cfb60a6c002b/volumes" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.137671 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:15 crc kubenswrapper[4756]: E0930 19:52:15.138090 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-log" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138125 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-log" Sep 30 19:52:15 crc kubenswrapper[4756]: E0930 19:52:15.138135 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="dnsmasq-dns" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138141 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="dnsmasq-dns" Sep 30 19:52:15 crc kubenswrapper[4756]: E0930 19:52:15.138188 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="init" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138388 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="init" Sep 30 19:52:15 crc kubenswrapper[4756]: E0930 19:52:15.138435 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerName="nova-scheduler-scheduler" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138443 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerName="nova-scheduler-scheduler" Sep 30 19:52:15 crc kubenswrapper[4756]: E0930 19:52:15.138455 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-api" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138462 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-api" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138692 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-log" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138746 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="78d7d65c-1041-4ec8-97df-885ea20328ea" containerName="dnsmasq-dns" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138762 4756 
memory_manager.go:354] "RemoveStaleState removing state" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" containerName="nova-api-api" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.138774 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f1511a1-de77-42e5-8fda-cfb60a6c002b" containerName="nova-scheduler-scheduler" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.139439 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.141215 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.147426 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.154961 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs\") pod \"04ff8360-3224-42bf-bfac-1d4725acd2ae\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.155154 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle\") pod \"04ff8360-3224-42bf-bfac-1d4725acd2ae\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.155325 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z567h\" (UniqueName: \"kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h\") pod \"04ff8360-3224-42bf-bfac-1d4725acd2ae\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.155465 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data\") pod \"04ff8360-3224-42bf-bfac-1d4725acd2ae\" (UID: \"04ff8360-3224-42bf-bfac-1d4725acd2ae\") " Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.156697 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs" (OuterVolumeSpecName: "logs") pod "04ff8360-3224-42bf-bfac-1d4725acd2ae" (UID: "04ff8360-3224-42bf-bfac-1d4725acd2ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.165820 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h" (OuterVolumeSpecName: "kube-api-access-z567h") pod "04ff8360-3224-42bf-bfac-1d4725acd2ae" (UID: "04ff8360-3224-42bf-bfac-1d4725acd2ae"). InnerVolumeSpecName "kube-api-access-z567h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.181348 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data" (OuterVolumeSpecName: "config-data") pod "04ff8360-3224-42bf-bfac-1d4725acd2ae" (UID: "04ff8360-3224-42bf-bfac-1d4725acd2ae"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.182894 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04ff8360-3224-42bf-bfac-1d4725acd2ae" (UID: "04ff8360-3224-42bf-bfac-1d4725acd2ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258071 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258315 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258386 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6frg\" (UniqueName: \"kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258474 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z567h\" (UniqueName: \"kubernetes.io/projected/04ff8360-3224-42bf-bfac-1d4725acd2ae-kube-api-access-z567h\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258488 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258500 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ff8360-3224-42bf-bfac-1d4725acd2ae-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.258512 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ff8360-3224-42bf-bfac-1d4725acd2ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.359623 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6frg\" (UniqueName: \"kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.359670 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.359806 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.364138 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.364438 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.375359 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6frg\" (UniqueName: \"kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg\") pod \"nova-scheduler-0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.468020 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.936853 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 19:52:15 crc kubenswrapper[4756]: I0930 19:52:15.965252 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.020114 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2c09b969-b4ff-472d-a938-80dcdbc49fa0","Type":"ContainerStarted","Data":"6f834961cdf36e4c7ca497383da013cc9f13744208b63d3bf35649599dccbd42"} Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.021859 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.158734 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.169052 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.177081 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.178652 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.180445 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.198716 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.279962 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.280363 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85rk8\" (UniqueName: \"kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.280421 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.280456 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.381771 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.381829 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.381925 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.381994 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85rk8\" (UniqueName: \"kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.382571 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " 
pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.385915 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.386750 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.397424 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85rk8\" (UniqueName: \"kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8\") pod \"nova-api-0\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " pod="openstack/nova-api-0" Sep 30 19:52:16 crc kubenswrapper[4756]: I0930 19:52:16.495739 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:17 crc kubenswrapper[4756]: I0930 19:52:17.009992 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:17 crc kubenswrapper[4756]: W0930 19:52:17.015555 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2690b61_b282_4c01_bfd4_afbcd8bd0968.slice/crio-12213960a61b6db79b187747828d1e40e754529b3f6ab0178a8ce828fd685e24 WatchSource:0}: Error finding container 12213960a61b6db79b187747828d1e40e754529b3f6ab0178a8ce828fd685e24: Status 404 returned error can't find the container with id 12213960a61b6db79b187747828d1e40e754529b3f6ab0178a8ce828fd685e24 Sep 30 19:52:17 crc kubenswrapper[4756]: I0930 19:52:17.040731 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerStarted","Data":"12213960a61b6db79b187747828d1e40e754529b3f6ab0178a8ce828fd685e24"} Sep 30 19:52:17 crc kubenswrapper[4756]: I0930 19:52:17.043822 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2c09b969-b4ff-472d-a938-80dcdbc49fa0","Type":"ContainerStarted","Data":"a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194"} Sep 30 19:52:17 crc kubenswrapper[4756]: I0930 19:52:17.062300 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.062285876 podStartE2EDuration="2.062285876s" podCreationTimestamp="2025-09-30 19:52:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:17.060812658 +0000 UTC m=+1266.681746165" watchObservedRunningTime="2025-09-30 19:52:17.062285876 +0000 UTC m=+1266.683219363" Sep 30 19:52:17 crc kubenswrapper[4756]: I0930 19:52:17.131484 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ff8360-3224-42bf-bfac-1d4725acd2ae" path="/var/lib/kubelet/pods/04ff8360-3224-42bf-bfac-1d4725acd2ae/volumes" Sep 30 19:52:18 crc kubenswrapper[4756]: I0930 19:52:18.054602 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerStarted","Data":"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2"} Sep 30 19:52:18 crc kubenswrapper[4756]: I0930 19:52:18.054890 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerStarted","Data":"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73"} Sep 30 19:52:18 crc kubenswrapper[4756]: I0930 19:52:18.081465 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.081441127 podStartE2EDuration="2.081441127s" podCreationTimestamp="2025-09-30 19:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:18.073274284 +0000 UTC m=+1267.694207771" watchObservedRunningTime="2025-09-30 19:52:18.081441127 +0000 UTC m=+1267.702374614" Sep 30 19:52:19 crc kubenswrapper[4756]: I0930 19:52:19.408628 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 19:52:19 crc kubenswrapper[4756]: I0930 19:52:19.558902 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:19 crc kubenswrapper[4756]: I0930 19:52:19.559127 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="718df58f-c1e5-4cab-9fb5-28a9303c6787" containerName="kube-state-metrics" containerID="cri-o://36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34" gracePeriod=30 Sep 30 19:52:19 crc kubenswrapper[4756]: I0930 19:52:19.991465 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.076997 4756 generic.go:334] "Generic (PLEG): container finished" podID="718df58f-c1e5-4cab-9fb5-28a9303c6787" containerID="36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34" exitCode=2 Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.077065 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"718df58f-c1e5-4cab-9fb5-28a9303c6787","Type":"ContainerDied","Data":"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34"} Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.077116 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"718df58f-c1e5-4cab-9fb5-28a9303c6787","Type":"ContainerDied","Data":"0f5c73b16a7f8fbe4aad09bbabe569e0b17c89990a5295f48465224c447c3907"} Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.077134 4756 scope.go:117] "RemoveContainer" containerID="36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.077075 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.097111 4756 scope.go:117] "RemoveContainer" containerID="36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34" Sep 30 19:52:20 crc kubenswrapper[4756]: E0930 19:52:20.097488 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34\": container with ID starting with 36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34 not found: ID does not exist" containerID="36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.097518 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34"} err="failed to get container status \"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34\": rpc error: code = NotFound desc = could not find container \"36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34\": container with ID starting with 36ccac6330607fd278f4224f86dd7f08c4029700687d7fc1504a00b684714a34 not found: ID does not exist" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.161087 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4rsd\" (UniqueName: \"kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd\") pod \"718df58f-c1e5-4cab-9fb5-28a9303c6787\" (UID: \"718df58f-c1e5-4cab-9fb5-28a9303c6787\") " Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.166638 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd" (OuterVolumeSpecName: "kube-api-access-z4rsd") pod "718df58f-c1e5-4cab-9fb5-28a9303c6787" (UID: "718df58f-c1e5-4cab-9fb5-28a9303c6787"). InnerVolumeSpecName "kube-api-access-z4rsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.263411 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4rsd\" (UniqueName: \"kubernetes.io/projected/718df58f-c1e5-4cab-9fb5-28a9303c6787-kube-api-access-z4rsd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.414041 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.447761 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.457344 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:20 crc kubenswrapper[4756]: E0930 19:52:20.457854 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718df58f-c1e5-4cab-9fb5-28a9303c6787" containerName="kube-state-metrics" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.457873 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="718df58f-c1e5-4cab-9fb5-28a9303c6787" containerName="kube-state-metrics" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.458111 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="718df58f-c1e5-4cab-9fb5-28a9303c6787" containerName="kube-state-metrics" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.458862 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.462253 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.462682 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.468149 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.470278 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.569866 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbdxt\" (UniqueName: \"kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.569948 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.569998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 
19:52:20.570025 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.671114 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbdxt\" (UniqueName: \"kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.671380 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.671446 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.671476 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.676914 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.681477 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.683025 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.691802 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbdxt\" (UniqueName: \"kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt\") pod \"kube-state-metrics-0\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " pod="openstack/kube-state-metrics-0" Sep 30 19:52:20 crc kubenswrapper[4756]: I0930 19:52:20.811958 4756 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.129419 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="718df58f-c1e5-4cab-9fb5-28a9303c6787" path="/var/lib/kubelet/pods/718df58f-c1e5-4cab-9fb5-28a9303c6787/volumes" Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.260068 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.271439 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.271782 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-central-agent" containerID="cri-o://e10134091ad8f357225c10adfda68a2ab219d55cd7368080db1d4a8f0fff1ee0" gracePeriod=30 Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.272102 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-notification-agent" containerID="cri-o://ceb05212adf346fea2a2787f9b9ac55ea78085f3bc87fcd82d78a2b51857d9d4" gracePeriod=30 Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.272138 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="sg-core" containerID="cri-o://93e8c8d3de362c85843aad668e360a25cafef168d6622ddf81257ab607664687" gracePeriod=30 Sep 30 19:52:21 crc kubenswrapper[4756]: I0930 19:52:21.272333 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="proxy-httpd" containerID="cri-o://90d54e75cb49503bf84d5646fd4363355d121fa689ccb8d1d366765f5fde71a9" gracePeriod=30 Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.101195 4756 generic.go:334] "Generic (PLEG): container finished" podID="30b7dced-f630-4b43-86a9-98bd2b088439" containerID="90d54e75cb49503bf84d5646fd4363355d121fa689ccb8d1d366765f5fde71a9" exitCode=0 Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.101984 4756 generic.go:334] "Generic (PLEG): container finished" podID="30b7dced-f630-4b43-86a9-98bd2b088439" containerID="93e8c8d3de362c85843aad668e360a25cafef168d6622ddf81257ab607664687" exitCode=2 Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.101451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerDied","Data":"90d54e75cb49503bf84d5646fd4363355d121fa689ccb8d1d366765f5fde71a9"} Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.102090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerDied","Data":"93e8c8d3de362c85843aad668e360a25cafef168d6622ddf81257ab607664687"} Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.102133 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerDied","Data":"e10134091ad8f357225c10adfda68a2ab219d55cd7368080db1d4a8f0fff1ee0"} Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.102010 4756 generic.go:334] "Generic (PLEG): container finished" 
podID="30b7dced-f630-4b43-86a9-98bd2b088439" containerID="e10134091ad8f357225c10adfda68a2ab219d55cd7368080db1d4a8f0fff1ee0" exitCode=0 Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.104580 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e92dcf48-58c2-4c3e-907f-7fd43e23a424","Type":"ContainerStarted","Data":"1de9577ca6bdb23f4bd45cbc37f2aa9d5e6f8b133c3ff43908637b2ae485af44"} Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.104629 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e92dcf48-58c2-4c3e-907f-7fd43e23a424","Type":"ContainerStarted","Data":"3b988ae109b01b5793feb6f3e4ac31c313feec15e8ddabe87bd972ae4c352c0f"} Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.104705 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 19:52:22 crc kubenswrapper[4756]: I0930 19:52:22.128783 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.757050847 podStartE2EDuration="2.128765804s" podCreationTimestamp="2025-09-30 19:52:20 +0000 UTC" firstStartedPulling="2025-09-30 19:52:21.26328972 +0000 UTC m=+1270.884223197" lastFinishedPulling="2025-09-30 19:52:21.635004677 +0000 UTC m=+1271.255938154" observedRunningTime="2025-09-30 19:52:22.11981153 +0000 UTC m=+1271.740745007" watchObservedRunningTime="2025-09-30 19:52:22.128765804 +0000 UTC m=+1271.749699281" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.146799 4756 generic.go:334] "Generic (PLEG): container finished" podID="30b7dced-f630-4b43-86a9-98bd2b088439" containerID="ceb05212adf346fea2a2787f9b9ac55ea78085f3bc87fcd82d78a2b51857d9d4" exitCode=0 Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.162632 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerDied","Data":"ceb05212adf346fea2a2787f9b9ac55ea78085f3bc87fcd82d78a2b51857d9d4"} Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.411834 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543430 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543510 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543652 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22ztd\" (UniqueName: \"kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543770 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543793 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543839 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.543879 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts\") pod \"30b7dced-f630-4b43-86a9-98bd2b088439\" (UID: \"30b7dced-f630-4b43-86a9-98bd2b088439\") " Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.546144 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.546218 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.550276 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts" (OuterVolumeSpecName: "scripts") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.550838 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd" (OuterVolumeSpecName: "kube-api-access-22ztd") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "kube-api-access-22ztd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.578477 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.630652 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646117 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646146 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22ztd\" (UniqueName: \"kubernetes.io/projected/30b7dced-f630-4b43-86a9-98bd2b088439-kube-api-access-22ztd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646171 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646180 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646189 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30b7dced-f630-4b43-86a9-98bd2b088439-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.646197 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.659849 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data" (OuterVolumeSpecName: "config-data") pod "30b7dced-f630-4b43-86a9-98bd2b088439" (UID: "30b7dced-f630-4b43-86a9-98bd2b088439"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:23 crc kubenswrapper[4756]: I0930 19:52:23.747537 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b7dced-f630-4b43-86a9-98bd2b088439-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.161278 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30b7dced-f630-4b43-86a9-98bd2b088439","Type":"ContainerDied","Data":"91b11ae1cdbe96247ea0cb0984578ba4c52b8dc70938083982a5a8ea9f1babaf"} Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.161335 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.161354 4756 scope.go:117] "RemoveContainer" containerID="90d54e75cb49503bf84d5646fd4363355d121fa689ccb8d1d366765f5fde71a9" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.184163 4756 scope.go:117] "RemoveContainer" containerID="93e8c8d3de362c85843aad668e360a25cafef168d6622ddf81257ab607664687" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.202611 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.218354 4756 scope.go:117] "RemoveContainer" containerID="ceb05212adf346fea2a2787f9b9ac55ea78085f3bc87fcd82d78a2b51857d9d4" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.220301 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.231738 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:24 crc kubenswrapper[4756]: E0930 19:52:24.232134 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-central-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232153 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-central-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: E0930 19:52:24.232190 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="proxy-httpd" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232198 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="proxy-httpd" Sep 30 19:52:24 crc kubenswrapper[4756]: E0930 19:52:24.232210 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="sg-core" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232217 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="sg-core" Sep 30 19:52:24 crc kubenswrapper[4756]: E0930 19:52:24.232235 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-notification-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232245 4756 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-notification-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232500 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-notification-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232523 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="ceilometer-central-agent" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232537 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="proxy-httpd" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.232557 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" containerName="sg-core" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.234298 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.236785 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.238187 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.238360 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.239809 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.240920 4756 scope.go:117] "RemoveContainer" containerID="e10134091ad8f357225c10adfda68a2ab219d55cd7368080db1d4a8f0fff1ee0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.257648 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.257710 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.257728 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hmnz\" (UniqueName: \"kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.257781 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.257838 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.258016 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.258091 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.258116 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.359975 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360053 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360086 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360114 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360135 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360172 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 
19:52:24.360209 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360226 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hmnz\" (UniqueName: \"kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.360553 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.361246 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.363852 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.363898 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.364709 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.379452 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.379566 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.383491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hmnz\" (UniqueName: \"kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz\") pod \"ceilometer-0\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " pod="openstack/ceilometer-0" Sep 30 19:52:24 crc kubenswrapper[4756]: I0930 19:52:24.591091 4756 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:25 crc kubenswrapper[4756]: I0930 19:52:25.014130 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:25 crc kubenswrapper[4756]: I0930 19:52:25.128973 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30b7dced-f630-4b43-86a9-98bd2b088439" path="/var/lib/kubelet/pods/30b7dced-f630-4b43-86a9-98bd2b088439/volumes" Sep 30 19:52:25 crc kubenswrapper[4756]: I0930 19:52:25.171600 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerStarted","Data":"1393d155308d187da165830e209fe291bcbb72b37ae9fef775891ee7f3c17eee"} Sep 30 19:52:25 crc kubenswrapper[4756]: I0930 19:52:25.468256 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 19:52:25 crc kubenswrapper[4756]: I0930 19:52:25.495251 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 19:52:26 crc kubenswrapper[4756]: I0930 19:52:26.184178 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerStarted","Data":"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410"} Sep 30 19:52:26 crc kubenswrapper[4756]: I0930 19:52:26.212171 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 19:52:26 crc kubenswrapper[4756]: I0930 19:52:26.496854 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:52:26 crc kubenswrapper[4756]: I0930 19:52:26.497141 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:52:27 crc kubenswrapper[4756]: I0930 19:52:27.195704 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerStarted","Data":"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81"} Sep 30 19:52:27 crc kubenswrapper[4756]: I0930 19:52:27.578611 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:27 crc kubenswrapper[4756]: I0930 19:52:27.579069 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:28 crc kubenswrapper[4756]: I0930 19:52:28.211165 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerStarted","Data":"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380"} Sep 30 19:52:29 crc kubenswrapper[4756]: I0930 19:52:29.224990 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerStarted","Data":"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac"} Sep 30 19:52:29 crc 
kubenswrapper[4756]: I0930 19:52:29.225320 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:52:29 crc kubenswrapper[4756]: I0930 19:52:29.248328 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.493811076 podStartE2EDuration="5.248306178s" podCreationTimestamp="2025-09-30 19:52:24 +0000 UTC" firstStartedPulling="2025-09-30 19:52:25.016142829 +0000 UTC m=+1274.637076306" lastFinishedPulling="2025-09-30 19:52:28.770637921 +0000 UTC m=+1278.391571408" observedRunningTime="2025-09-30 19:52:29.243211764 +0000 UTC m=+1278.864145291" watchObservedRunningTime="2025-09-30 19:52:29.248306178 +0000 UTC m=+1278.869239695" Sep 30 19:52:30 crc kubenswrapper[4756]: I0930 19:52:30.824638 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.262956 4756 generic.go:334] "Generic (PLEG): container finished" podID="df33b9b5-354b-4431-aecc-aef93a455a02" containerID="6642d2ccfdc3501ceb1f771a6ea2bd1b3170a7e579b9cc7e8dc8a0a13881303a" exitCode=137 Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.263113 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"df33b9b5-354b-4431-aecc-aef93a455a02","Type":"ContainerDied","Data":"6642d2ccfdc3501ceb1f771a6ea2bd1b3170a7e579b9cc7e8dc8a0a13881303a"} Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.263523 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"df33b9b5-354b-4431-aecc-aef93a455a02","Type":"ContainerDied","Data":"73c5c72bc980e19c32a571eb30d2b228b743844e6ff710bf543634d9cb05bdaa"} Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.263543 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73c5c72bc980e19c32a571eb30d2b228b743844e6ff710bf543634d9cb05bdaa" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.266172 4756 generic.go:334] "Generic (PLEG): container finished" podID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerID="f7d5ee1721d495453ac73162ff8b96b6a3b3f3d4c350acafb440165e3ddc9dcb" exitCode=137 Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.266212 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerDied","Data":"f7d5ee1721d495453ac73162ff8b96b6a3b3f3d4c350acafb440165e3ddc9dcb"} Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.266238 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"576e2aad-6c1b-47d2-8338-26beb8c4c93e","Type":"ContainerDied","Data":"acc9c9c734195425d5066964ee74bab73a901ff06f38eddd92c392244aac22d0"} Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.266249 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acc9c9c734195425d5066964ee74bab73a901ff06f38eddd92c392244aac22d0" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.307419 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.313504 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.389823 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle\") pod \"df33b9b5-354b-4431-aecc-aef93a455a02\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.389870 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data\") pod \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.389904 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5cl7\" (UniqueName: \"kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7\") pod \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.389991 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle\") pod \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.390014 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs\") pod \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\" (UID: \"576e2aad-6c1b-47d2-8338-26beb8c4c93e\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.390030 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data\") pod \"df33b9b5-354b-4431-aecc-aef93a455a02\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.390070 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62kq8\" (UniqueName: \"kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8\") pod \"df33b9b5-354b-4431-aecc-aef93a455a02\" (UID: \"df33b9b5-354b-4431-aecc-aef93a455a02\") " Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.392707 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs" (OuterVolumeSpecName: "logs") pod "576e2aad-6c1b-47d2-8338-26beb8c4c93e" (UID: "576e2aad-6c1b-47d2-8338-26beb8c4c93e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.398983 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7" (OuterVolumeSpecName: "kube-api-access-q5cl7") pod "576e2aad-6c1b-47d2-8338-26beb8c4c93e" (UID: "576e2aad-6c1b-47d2-8338-26beb8c4c93e"). InnerVolumeSpecName "kube-api-access-q5cl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.404064 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8" (OuterVolumeSpecName: "kube-api-access-62kq8") pod "df33b9b5-354b-4431-aecc-aef93a455a02" (UID: "df33b9b5-354b-4431-aecc-aef93a455a02"). InnerVolumeSpecName "kube-api-access-62kq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.426606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "576e2aad-6c1b-47d2-8338-26beb8c4c93e" (UID: "576e2aad-6c1b-47d2-8338-26beb8c4c93e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.442350 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data" (OuterVolumeSpecName: "config-data") pod "576e2aad-6c1b-47d2-8338-26beb8c4c93e" (UID: "576e2aad-6c1b-47d2-8338-26beb8c4c93e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.456711 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df33b9b5-354b-4431-aecc-aef93a455a02" (UID: "df33b9b5-354b-4431-aecc-aef93a455a02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.457249 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data" (OuterVolumeSpecName: "config-data") pod "df33b9b5-354b-4431-aecc-aef93a455a02" (UID: "df33b9b5-354b-4431-aecc-aef93a455a02"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494902 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494936 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62kq8\" (UniqueName: \"kubernetes.io/projected/df33b9b5-354b-4431-aecc-aef93a455a02-kube-api-access-62kq8\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494948 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df33b9b5-354b-4431-aecc-aef93a455a02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494957 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494966 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5cl7\" (UniqueName: \"kubernetes.io/projected/576e2aad-6c1b-47d2-8338-26beb8c4c93e-kube-api-access-q5cl7\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494974 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/576e2aad-6c1b-47d2-8338-26beb8c4c93e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:33 crc kubenswrapper[4756]: I0930 19:52:33.494984 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/576e2aad-6c1b-47d2-8338-26beb8c4c93e-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.275336 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.275368 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.313969 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.322667 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.340182 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.360213 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.375510 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: E0930 19:52:34.376053 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df33b9b5-354b-4431-aecc-aef93a455a02" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376079 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="df33b9b5-354b-4431-aecc-aef93a455a02" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:52:34 crc kubenswrapper[4756]: E0930 19:52:34.376106 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-log" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376115 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-log" Sep 30 19:52:34 crc kubenswrapper[4756]: E0930 19:52:34.376131 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-metadata" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376138 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-metadata" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376366 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-metadata" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376407 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" containerName="nova-metadata-log" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.376421 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="df33b9b5-354b-4431-aecc-aef93a455a02" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.377030 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.380471 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.380736 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.381438 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.385704 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.387821 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.395138 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.396225 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.399712 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.416703 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.416764 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417672 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417740 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417765 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417839 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417945 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlcks\" (UniqueName: \"kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.417981 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.418102 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.418185 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2rjp\" (UniqueName: \"kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.445588 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520027 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlcks\" (UniqueName: \"kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520068 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520156 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2rjp\" (UniqueName: \"kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520193 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520220 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520250 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520276 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520297 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.520337 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.522062 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.525975 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.526036 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.526094 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs\") pod 
\"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.526642 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.534263 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.536793 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.540019 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.545767 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2rjp\" (UniqueName: \"kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp\") pod \"nova-metadata-0\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") " pod="openstack/nova-metadata-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.560982 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlcks\" (UniqueName: \"kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.732113 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:34 crc kubenswrapper[4756]: I0930 19:52:34.735939 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.125516 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="576e2aad-6c1b-47d2-8338-26beb8c4c93e" path="/var/lib/kubelet/pods/576e2aad-6c1b-47d2-8338-26beb8c4c93e/volumes" Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.126464 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df33b9b5-354b-4431-aecc-aef93a455a02" path="/var/lib/kubelet/pods/df33b9b5-354b-4431-aecc-aef93a455a02/volumes" Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.211154 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:35 crc kubenswrapper[4756]: W0930 19:52:35.265297 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f3ed405_3d2e_4296_8ea5_7b426f2bd987.slice/crio-9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7 WatchSource:0}: Error finding container 9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7: Status 404 returned error can't find the container with id 9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7 Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.265557 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.290496 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f3ed405-3d2e-4296-8ea5-7b426f2bd987","Type":"ContainerStarted","Data":"9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7"} Sep 30 19:52:35 crc kubenswrapper[4756]: I0930 19:52:35.292177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerStarted","Data":"59fba15c97b7f164077e5065f9da0add5e1dc99c0e8881633a5955b491b9d655"} Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.305701 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f3ed405-3d2e-4296-8ea5-7b426f2bd987","Type":"ContainerStarted","Data":"10c6b1bb1f7f4de1bc5c4aca7ca3dd73b0eea0fe2e2a9e20084f2def6aa17018"} Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.308156 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerStarted","Data":"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d"} Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.308198 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerStarted","Data":"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4"} Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.335225 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.335207649 podStartE2EDuration="2.335207649s" podCreationTimestamp="2025-09-30 19:52:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:36.321952483 +0000 UTC m=+1285.942885990" watchObservedRunningTime="2025-09-30 19:52:36.335207649 +0000 UTC m=+1285.956141116" Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 
19:52:36.353007 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.3529911439999998 podStartE2EDuration="2.352991144s" podCreationTimestamp="2025-09-30 19:52:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:36.344588795 +0000 UTC m=+1285.965522292" watchObservedRunningTime="2025-09-30 19:52:36.352991144 +0000 UTC m=+1285.973924621" Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.503158 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.503579 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.503789 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 19:52:36 crc kubenswrapper[4756]: I0930 19:52:36.506805 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.317136 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.322812 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.522460 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.525216 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.537371 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580356 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58grk\" (UniqueName: \"kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580590 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580645 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580734 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb\") pod 
\"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580825 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.580861 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682569 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682632 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682692 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682768 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682793 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.682822 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58grk\" (UniqueName: \"kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.683815 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: 
\"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.683917 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.683926 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.683995 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.684218 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.710654 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58grk\" (UniqueName: \"kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk\") pod \"dnsmasq-dns-59cf4bdb65-ljgq7\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:37 crc kubenswrapper[4756]: I0930 19:52:37.863860 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:38 crc kubenswrapper[4756]: I0930 19:52:38.357512 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:52:38 crc kubenswrapper[4756]: W0930 19:52:38.360203 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ad8997f_b736_41c8_b087_d09b6316065d.slice/crio-5f40a8e4c92dc45ec9c7183da68ffaa56203051c2de942ed08935bd6e16423b1 WatchSource:0}: Error finding container 5f40a8e4c92dc45ec9c7183da68ffaa56203051c2de942ed08935bd6e16423b1: Status 404 returned error can't find the container with id 5f40a8e4c92dc45ec9c7183da68ffaa56203051c2de942ed08935bd6e16423b1 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.335681 4756 generic.go:334] "Generic (PLEG): container finished" podID="3ad8997f-b736-41c8-b087-d09b6316065d" containerID="58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b" exitCode=0 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.335768 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" event={"ID":"3ad8997f-b736-41c8-b087-d09b6316065d","Type":"ContainerDied","Data":"58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b"} Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.335805 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" event={"ID":"3ad8997f-b736-41c8-b087-d09b6316065d","Type":"ContainerStarted","Data":"5f40a8e4c92dc45ec9c7183da68ffaa56203051c2de942ed08935bd6e16423b1"} Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.721259 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.721907 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-central-agent" containerID="cri-o://e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410" gracePeriod=30 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.721996 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-notification-agent" containerID="cri-o://b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81" gracePeriod=30 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.722032 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="proxy-httpd" containerID="cri-o://70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac" gracePeriod=30 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.722032 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="sg-core" containerID="cri-o://cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380" gracePeriod=30 Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.733688 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.737064 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 
19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.737139 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 19:52:39 crc kubenswrapper[4756]: I0930 19:52:39.737549 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.197:3000/\": EOF" Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.171259 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349291 4756 generic.go:334] "Generic (PLEG): container finished" podID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerID="70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac" exitCode=0 Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349321 4756 generic.go:334] "Generic (PLEG): container finished" podID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerID="cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380" exitCode=2 Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349330 4756 generic.go:334] "Generic (PLEG): container finished" podID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerID="e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410" exitCode=0 Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349366 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerDied","Data":"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac"} Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerDied","Data":"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380"} Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.349437 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerDied","Data":"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410"} Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.352067 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" event={"ID":"3ad8997f-b736-41c8-b087-d09b6316065d","Type":"ContainerStarted","Data":"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba"} Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.352125 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-log" containerID="cri-o://7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73" gracePeriod=30 Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.352273 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-api" containerID="cri-o://144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2" gracePeriod=30 Sep 30 19:52:40 crc kubenswrapper[4756]: I0930 19:52:40.379111 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" podStartSLOduration=3.379095547 podStartE2EDuration="3.379095547s" podCreationTimestamp="2025-09-30 19:52:37 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:40.377471304 +0000 UTC m=+1289.998404781" watchObservedRunningTime="2025-09-30 19:52:40.379095547 +0000 UTC m=+1290.000029024" Sep 30 19:52:41 crc kubenswrapper[4756]: I0930 19:52:41.361602 4756 generic.go:334] "Generic (PLEG): container finished" podID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerID="7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73" exitCode=143 Sep 30 19:52:41 crc kubenswrapper[4756]: I0930 19:52:41.361649 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerDied","Data":"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73"} Sep 30 19:52:41 crc kubenswrapper[4756]: I0930 19:52:41.362156 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:41 crc kubenswrapper[4756]: I0930 19:52:41.538848 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:52:41 crc kubenswrapper[4756]: I0930 19:52:41.538911 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.327699 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385373 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385521 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385555 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385616 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385733 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385785 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385844 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.385992 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hmnz\" (UniqueName: \"kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz\") pod \"9e98fab0-8869-4d11-accc-cc6188f528aa\" (UID: \"9e98fab0-8869-4d11-accc-cc6188f528aa\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.386661 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.386673 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.387333 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.387350 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e98fab0-8869-4d11-accc-cc6188f528aa-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.400712 4756 generic.go:334] "Generic (PLEG): container finished" podID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerID="b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81" exitCode=0 Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.400806 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerDied","Data":"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81"} Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.401007 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e98fab0-8869-4d11-accc-cc6188f528aa","Type":"ContainerDied","Data":"1393d155308d187da165830e209fe291bcbb72b37ae9fef775891ee7f3c17eee"} Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.400872 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.401069 4756 scope.go:117] "RemoveContainer" containerID="70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.407756 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz" (OuterVolumeSpecName: "kube-api-access-2hmnz") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "kube-api-access-2hmnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.407795 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts" (OuterVolumeSpecName: "scripts") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.440160 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.489791 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hmnz\" (UniqueName: \"kubernetes.io/projected/9e98fab0-8869-4d11-accc-cc6188f528aa-kube-api-access-2hmnz\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.489823 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.489908 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.506178 4756 scope.go:117] "RemoveContainer" containerID="cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.515291 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data" (OuterVolumeSpecName: "config-data") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.531554 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.544072 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e98fab0-8869-4d11-accc-cc6188f528aa" (UID: "9e98fab0-8869-4d11-accc-cc6188f528aa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.576814 4756 scope.go:117] "RemoveContainer" containerID="b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.592728 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.592757 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.592771 4756 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e98fab0-8869-4d11-accc-cc6188f528aa-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.595230 4756 scope.go:117] "RemoveContainer" containerID="e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.615772 4756 scope.go:117] "RemoveContainer" containerID="70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.616208 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac\": container with ID starting with 70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac not found: ID does not exist" containerID="70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.616234 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac"} err="failed to get container status \"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac\": rpc error: code = NotFound desc = could not find container \"70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac\": container with ID starting with 70c1431406ffad6d280ae503d9b24f1e284cd4ce147355986bdb6284b9ada6ac not found: ID does not exist" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.616254 4756 scope.go:117] "RemoveContainer" containerID="cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.616498 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380\": container with ID starting with cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380 not found: ID does not exist" containerID="cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.616514 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380"} err="failed to get container status \"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380\": rpc error: code = NotFound desc = could not find container 
\"cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380\": container with ID starting with cfc25a06580e87f8defabd76d686babda3a8813b5d4c29aeb359ef5d22732380 not found: ID does not exist" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.616528 4756 scope.go:117] "RemoveContainer" containerID="b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.618600 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81\": container with ID starting with b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81 not found: ID does not exist" containerID="b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.618630 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81"} err="failed to get container status \"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81\": rpc error: code = NotFound desc = could not find container \"b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81\": container with ID starting with b974b6df34d962f4f40e8c6f726afd418030d61b9e23842ca68eeec8b8c30e81 not found: ID does not exist" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.618653 4756 scope.go:117] "RemoveContainer" containerID="e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.618891 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410\": container with ID starting with e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410 not found: ID does not exist" containerID="e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.618909 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410"} err="failed to get container status \"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410\": rpc error: code = NotFound desc = could not find container \"e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410\": container with ID starting with e49cedfa787e3b2543dcaa4f8448297938c4b5936c7c01a1d4529aa70f266410 not found: ID does not exist" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.740728 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.749257 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.784059 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.785014 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-notification-agent" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.785031 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-notification-agent" Sep 30 19:52:43 
crc kubenswrapper[4756]: E0930 19:52:43.785082 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-central-agent" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.785092 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-central-agent" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.785135 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="sg-core" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.785144 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="sg-core" Sep 30 19:52:43 crc kubenswrapper[4756]: E0930 19:52:43.785161 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="proxy-httpd" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.785169 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="proxy-httpd" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.790204 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="proxy-httpd" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.790240 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="sg-core" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.790269 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-notification-agent" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.790306 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" containerName="ceilometer-central-agent" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.796308 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.799998 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.801044 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.801212 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.821534 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.881106 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904025 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs\") pod \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904154 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle\") pod \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904281 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data\") pod \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904320 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85rk8\" (UniqueName: \"kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8\") pod \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\" (UID: \"d2690b61-b282-4c01-bfd4-afbcd8bd0968\") " Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904654 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs" (OuterVolumeSpecName: "logs") pod "d2690b61-b282-4c01-bfd4-afbcd8bd0968" (UID: "d2690b61-b282-4c01-bfd4-afbcd8bd0968"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904696 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904720 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904741 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904819 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904837 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t229\" (UniqueName: \"kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904862 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904898 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.904948 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2690b61-b282-4c01-bfd4-afbcd8bd0968-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.913129 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8" (OuterVolumeSpecName: 
"kube-api-access-85rk8") pod "d2690b61-b282-4c01-bfd4-afbcd8bd0968" (UID: "d2690b61-b282-4c01-bfd4-afbcd8bd0968"). InnerVolumeSpecName "kube-api-access-85rk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.951683 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2690b61-b282-4c01-bfd4-afbcd8bd0968" (UID: "d2690b61-b282-4c01-bfd4-afbcd8bd0968"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:43 crc kubenswrapper[4756]: I0930 19:52:43.969591 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data" (OuterVolumeSpecName: "config-data") pod "d2690b61-b282-4c01-bfd4-afbcd8bd0968" (UID: "d2690b61-b282-4c01-bfd4-afbcd8bd0968"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.006991 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007045 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007074 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007095 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007175 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007217 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t229\" (UniqueName: \"kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007287 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") 
" pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007335 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007510 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007536 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85rk8\" (UniqueName: \"kubernetes.io/projected/d2690b61-b282-4c01-bfd4-afbcd8bd0968-kube-api-access-85rk8\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007553 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2690b61-b282-4c01-bfd4-afbcd8bd0968-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.007974 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.009895 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.011839 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.012361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.014014 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.016846 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.019095 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data\") pod \"ceilometer-0\" (UID: 
\"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.026321 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t229\" (UniqueName: \"kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229\") pod \"ceilometer-0\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.121717 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.419104 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.422090 4756 generic.go:334] "Generic (PLEG): container finished" podID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerID="144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2" exitCode=0 Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.422143 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerDied","Data":"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2"} Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.422177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d2690b61-b282-4c01-bfd4-afbcd8bd0968","Type":"ContainerDied","Data":"12213960a61b6db79b187747828d1e40e754529b3f6ab0178a8ce828fd685e24"} Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.422199 4756 scope.go:117] "RemoveContainer" containerID="144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.422316 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.489330 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.494161 4756 scope.go:117] "RemoveContainer" containerID="7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.509015 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.518942 4756 scope.go:117] "RemoveContainer" containerID="144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.523001 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:44 crc kubenswrapper[4756]: E0930 19:52:44.523501 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-api" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.523525 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-api" Sep 30 19:52:44 crc kubenswrapper[4756]: E0930 19:52:44.523583 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-log" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.523593 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-log" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.523850 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-log" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.523874 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" containerName="nova-api-api" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.525070 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.530516 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.530732 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.531030 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 19:52:44 crc kubenswrapper[4756]: E0930 19:52:44.537411 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2\": container with ID starting with 144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2 not found: ID does not exist" containerID="144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.537442 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2"} err="failed to get container status \"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2\": rpc error: code = NotFound desc = could not find container \"144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2\": container with ID starting with 144841594fdfdc515967de332ca6c03c61bd7b867ec6835c38ad1265d8e618b2 not found: ID does not exist" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.537468 4756 scope.go:117] "RemoveContainer" containerID="7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73" Sep 30 19:52:44 crc kubenswrapper[4756]: E0930 19:52:44.537955 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73\": container with ID starting with 7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73 not found: ID does not exist" containerID="7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.537974 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73"} err="failed to get container status \"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73\": rpc error: code = NotFound desc = could not find container \"7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73\": container with ID starting with 7f224754455da6d862bca540eeb1063828055422bb68de68929ab6f5eb427f73 not found: ID does not exist" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.540929 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.618761 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.618835 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.618955 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.618984 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.619059 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgt7\" (UniqueName: \"kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.619082 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.720928 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.720983 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.721033 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.721052 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.721095 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.721116 4756 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-pvgt7\" (UniqueName: \"kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.721341 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.726425 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.726557 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.727117 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.727558 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.733479 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.736643 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgt7\" (UniqueName: \"kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7\") pod \"nova-api-0\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " pod="openstack/nova-api-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.737011 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.737035 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.749800 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:44 crc kubenswrapper[4756]: I0930 19:52:44.855980 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.127313 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e98fab0-8869-4d11-accc-cc6188f528aa" path="/var/lib/kubelet/pods/9e98fab0-8869-4d11-accc-cc6188f528aa/volumes" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.128117 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2690b61-b282-4c01-bfd4-afbcd8bd0968" path="/var/lib/kubelet/pods/d2690b61-b282-4c01-bfd4-afbcd8bd0968/volumes" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.333475 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.433426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerStarted","Data":"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e"} Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.433467 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerStarted","Data":"e4fb0c80e0c096f0d548c2c8e2360463ccf8846baf5adfa6abdfcf6710feb13b"} Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.436500 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerStarted","Data":"be12478ca53ad02dbbb5081cb57e0d38ab962287c46ee293b6892d4693682ce0"} Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.451363 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.590820 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rkwpj"] Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.592181 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.595885 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.595901 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.604847 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkwpj"] Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.643695 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnrh5\" (UniqueName: \"kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.643911 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.643947 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.643998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.746964 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.747027 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.747072 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.747143 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnrh5\" (UniqueName: 
\"kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.749565 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.750276 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.751154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.751462 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.760347 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.763952 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnrh5\" (UniqueName: \"kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5\") pod \"nova-cell1-cell-mapping-rkwpj\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") " pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:45 crc kubenswrapper[4756]: I0930 19:52:45.963468 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.407262 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkwpj"] Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.449728 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerStarted","Data":"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530"} Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.458780 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkwpj" event={"ID":"41ec7c91-a63c-45a9-97dc-f9c1666bdcce","Type":"ContainerStarted","Data":"e3942fc725a1f58f43b4c44fff13049393adfbc5b644c9034cb31c5e6ed04cde"} Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.465219 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerStarted","Data":"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887"} Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.465255 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerStarted","Data":"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37"} Sep 30 19:52:46 crc kubenswrapper[4756]: I0930 19:52:46.484903 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.484885188 podStartE2EDuration="2.484885188s" podCreationTimestamp="2025-09-30 19:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:46.480517534 +0000 UTC m=+1296.101451011" watchObservedRunningTime="2025-09-30 19:52:46.484885188 +0000 UTC m=+1296.105818665" Sep 30 19:52:47 crc kubenswrapper[4756]: I0930 19:52:47.488082 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerStarted","Data":"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9"} Sep 30 19:52:47 crc kubenswrapper[4756]: I0930 19:52:47.490323 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkwpj" event={"ID":"41ec7c91-a63c-45a9-97dc-f9c1666bdcce","Type":"ContainerStarted","Data":"f362a6f3b07e09c3283f9cf2b98cdc3b3ef2d6ed31377ef667da3782fae5da8a"} Sep 30 19:52:47 crc kubenswrapper[4756]: I0930 19:52:47.515427 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rkwpj" podStartSLOduration=2.515407243 podStartE2EDuration="2.515407243s" podCreationTimestamp="2025-09-30 19:52:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:47.508340439 +0000 UTC m=+1297.129273926" watchObservedRunningTime="2025-09-30 19:52:47.515407243 +0000 UTC m=+1297.136340740" Sep 30 19:52:47 crc kubenswrapper[4756]: I0930 19:52:47.865144 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:52:47 crc kubenswrapper[4756]: I0930 19:52:47.932439 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:52:47 crc 
kubenswrapper[4756]: I0930 19:52:47.932660 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="dnsmasq-dns" containerID="cri-o://1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c" gracePeriod=10 Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.440557 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.503716 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerStarted","Data":"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f"} Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.504119 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.505784 4756 generic.go:334] "Generic (PLEG): container finished" podID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerID="1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c" exitCode=0 Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.506260 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.506424 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" event={"ID":"5585066a-57c7-4fd5-91b8-36af88ee6f9b","Type":"ContainerDied","Data":"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c"} Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.506464 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-hcjll" event={"ID":"5585066a-57c7-4fd5-91b8-36af88ee6f9b","Type":"ContainerDied","Data":"13bb3aaf91fa25515a5696804af9016bfa8df63cc25dc0d6418f53f2d5bd323b"} Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.506483 4756 scope.go:117] "RemoveContainer" containerID="1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.538385 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.287680593 podStartE2EDuration="5.5383599s" podCreationTimestamp="2025-09-30 19:52:43 +0000 UTC" firstStartedPulling="2025-09-30 19:52:44.436063345 +0000 UTC m=+1294.056996822" lastFinishedPulling="2025-09-30 19:52:47.686742652 +0000 UTC m=+1297.307676129" observedRunningTime="2025-09-30 19:52:48.524609632 +0000 UTC m=+1298.145543109" watchObservedRunningTime="2025-09-30 19:52:48.5383599 +0000 UTC m=+1298.159293377" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.539313 4756 scope.go:117] "RemoveContainer" containerID="67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549038 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549166 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549224 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549247 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4mnf\" (UniqueName: \"kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549271 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.549312 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb\") pod \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\" (UID: \"5585066a-57c7-4fd5-91b8-36af88ee6f9b\") " Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.565719 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf" (OuterVolumeSpecName: "kube-api-access-r4mnf") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "kube-api-access-r4mnf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.567811 4756 scope.go:117] "RemoveContainer" containerID="1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c" Sep 30 19:52:48 crc kubenswrapper[4756]: E0930 19:52:48.568756 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c\": container with ID starting with 1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c not found: ID does not exist" containerID="1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.568799 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c"} err="failed to get container status \"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c\": rpc error: code = NotFound desc = could not find container \"1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c\": container with ID starting with 1393abceb130e8d843a769d54554b6bec32b4ca26b2612d660665605da762b2c not found: ID does not exist" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.568824 4756 scope.go:117] "RemoveContainer" containerID="67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022" Sep 30 19:52:48 crc kubenswrapper[4756]: E0930 19:52:48.569221 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022\": container with ID starting with 67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022 not found: ID does not exist" containerID="67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.569244 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022"} err="failed to get container status \"67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022\": rpc error: code = NotFound desc = could not find container \"67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022\": container with ID starting with 67d58b09c6f4e31d8ce5b1c997ab3aa4f38c32454c8098ef147764143adde022 not found: ID does not exist" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.605714 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.607974 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.620927 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.621609 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config" (OuterVolumeSpecName: "config") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.627912 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5585066a-57c7-4fd5-91b8-36af88ee6f9b" (UID: "5585066a-57c7-4fd5-91b8-36af88ee6f9b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.653911 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.654134 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.654198 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4mnf\" (UniqueName: \"kubernetes.io/projected/5585066a-57c7-4fd5-91b8-36af88ee6f9b-kube-api-access-r4mnf\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.654255 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.654309 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.654372 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5585066a-57c7-4fd5-91b8-36af88ee6f9b-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.839083 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:52:48 crc kubenswrapper[4756]: I0930 19:52:48.846352 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-hcjll"] Sep 30 19:52:49 crc kubenswrapper[4756]: I0930 19:52:49.126871 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" path="/var/lib/kubelet/pods/5585066a-57c7-4fd5-91b8-36af88ee6f9b/volumes" Sep 30 19:52:51 crc 
kubenswrapper[4756]: I0930 19:52:51.577322 4756 generic.go:334] "Generic (PLEG): container finished" podID="41ec7c91-a63c-45a9-97dc-f9c1666bdcce" containerID="f362a6f3b07e09c3283f9cf2b98cdc3b3ef2d6ed31377ef667da3782fae5da8a" exitCode=0
Sep 30 19:52:51 crc kubenswrapper[4756]: I0930 19:52:51.577431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkwpj" event={"ID":"41ec7c91-a63c-45a9-97dc-f9c1666bdcce","Type":"ContainerDied","Data":"f362a6f3b07e09c3283f9cf2b98cdc3b3ef2d6ed31377ef667da3782fae5da8a"}
Sep 30 19:52:52 crc kubenswrapper[4756]: I0930 19:52:52.994145 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkwpj"
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.157433 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle\") pod \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") "
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.157477 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data\") pod \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") "
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.157576 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts\") pod \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") "
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.157775 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnrh5\" (UniqueName: \"kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5\") pod \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\" (UID: \"41ec7c91-a63c-45a9-97dc-f9c1666bdcce\") "
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.167648 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts" (OuterVolumeSpecName: "scripts") pod "41ec7c91-a63c-45a9-97dc-f9c1666bdcce" (UID: "41ec7c91-a63c-45a9-97dc-f9c1666bdcce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.172145 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5" (OuterVolumeSpecName: "kube-api-access-xnrh5") pod "41ec7c91-a63c-45a9-97dc-f9c1666bdcce" (UID: "41ec7c91-a63c-45a9-97dc-f9c1666bdcce"). InnerVolumeSpecName "kube-api-access-xnrh5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.185975 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data" (OuterVolumeSpecName: "config-data") pod "41ec7c91-a63c-45a9-97dc-f9c1666bdcce" (UID: "41ec7c91-a63c-45a9-97dc-f9c1666bdcce"). InnerVolumeSpecName "config-data".
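
The 19:52:48.567811 sequence above shows the kubelet's container cleanup being deliberately idempotent: it asks the runtime for the status of containers CRI-O has already pruned, receives gRPC NotFound, logs "DeleteContainer returned error", and proceeds as if the delete succeeded, since an already-absent container is the desired end state. A hedged Go sketch of that pattern; removeIfPresent and removeFn are hypothetical names, not kubelet or CRI API:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeFn stands in for a CRI RemoveContainer call.
type removeFn func(containerID string) error

// removeIfPresent treats gRPC NotFound as success: the container is already
// gone, which is exactly what the caller wanted.
func removeIfPresent(remove removeFn, id string) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return fmt.Errorf("remove container %s: %w", id, err)
	}
	return nil
}

func main() {
	// Simulate a runtime that has already pruned the container.
	gone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	fmt.Println(removeIfPresent(gone, "1393abce")) // <nil>: NotFound swallowed
}
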
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.191928 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41ec7c91-a63c-45a9-97dc-f9c1666bdcce" (UID: "41ec7c91-a63c-45a9-97dc-f9c1666bdcce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.260344 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.260382 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.260401 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.260411 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnrh5\" (UniqueName: \"kubernetes.io/projected/41ec7c91-a63c-45a9-97dc-f9c1666bdcce-kube-api-access-xnrh5\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.599861 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkwpj" event={"ID":"41ec7c91-a63c-45a9-97dc-f9c1666bdcce","Type":"ContainerDied","Data":"e3942fc725a1f58f43b4c44fff13049393adfbc5b644c9034cb31c5e6ed04cde"} Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.599897 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3942fc725a1f58f43b4c44fff13049393adfbc5b644c9034cb31c5e6ed04cde" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.599927 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkwpj" Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.782427 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.782883 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-log" containerID="cri-o://cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" gracePeriod=30 Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.783092 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-api" containerID="cri-o://555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" gracePeriod=30 Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.825296 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.825546 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerName="nova-scheduler-scheduler" containerID="cri-o://a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194" gracePeriod=30 Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.834933 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.835425 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-log" containerID="cri-o://63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4" gracePeriod=30 Sep 30 19:52:53 crc kubenswrapper[4756]: I0930 19:52:53.835867 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-metadata" containerID="cri-o://6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d" gracePeriod=30 Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.313821 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387234 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387316 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387597 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387663 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387689 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.387814 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvgt7\" (UniqueName: \"kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7\") pod \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\" (UID: \"a645827a-f0f2-4cbd-8f0a-d1126fe95701\") " Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.388122 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs" (OuterVolumeSpecName: "logs") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.388556 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a645827a-f0f2-4cbd-8f0a-d1126fe95701-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.392197 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7" (OuterVolumeSpecName: "kube-api-access-pvgt7") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "kube-api-access-pvgt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.416035 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.422371 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data" (OuterVolumeSpecName: "config-data") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.445931 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.446800 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a645827a-f0f2-4cbd-8f0a-d1126fe95701" (UID: "a645827a-f0f2-4cbd-8f0a-d1126fe95701"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.489766 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.489796 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.489806 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvgt7\" (UniqueName: \"kubernetes.io/projected/a645827a-f0f2-4cbd-8f0a-d1126fe95701-kube-api-access-pvgt7\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.489815 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.489826 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a645827a-f0f2-4cbd-8f0a-d1126fe95701-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.609830 4756 generic.go:334] "Generic (PLEG): container finished" podID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerID="555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" exitCode=0 Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.610108 4756 generic.go:334] "Generic (PLEG): container finished" 
podID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerID="cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" exitCode=143 Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.609897 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.609900 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerDied","Data":"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887"} Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.610288 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerDied","Data":"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37"} Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.610304 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a645827a-f0f2-4cbd-8f0a-d1126fe95701","Type":"ContainerDied","Data":"be12478ca53ad02dbbb5081cb57e0d38ab962287c46ee293b6892d4693682ce0"} Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.610325 4756 scope.go:117] "RemoveContainer" containerID="555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.612150 4756 generic.go:334] "Generic (PLEG): container finished" podID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerID="63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4" exitCode=143 Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.612199 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerDied","Data":"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4"} Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.629232 4756 scope.go:117] "RemoveContainer" containerID="cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.663081 4756 scope.go:117] "RemoveContainer" containerID="555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.664112 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887\": container with ID starting with 555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887 not found: ID does not exist" containerID="555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.664205 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887"} err="failed to get container status \"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887\": rpc error: code = NotFound desc = could not find container \"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887\": container with ID starting with 555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887 not found: ID does not exist" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.664233 4756 scope.go:117] "RemoveContainer" containerID="cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" Sep 30 19:52:54 crc 
kubenswrapper[4756]: E0930 19:52:54.667912 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37\": container with ID starting with cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37 not found: ID does not exist" containerID="cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.667963 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37"} err="failed to get container status \"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37\": rpc error: code = NotFound desc = could not find container \"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37\": container with ID starting with cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37 not found: ID does not exist" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.667990 4756 scope.go:117] "RemoveContainer" containerID="555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.668063 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.668691 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887"} err="failed to get container status \"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887\": rpc error: code = NotFound desc = could not find container \"555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887\": container with ID starting with 555af6ac58b0030141c06926f56bf9141286bde6168e3c175a0d701868cf8887 not found: ID does not exist" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.668738 4756 scope.go:117] "RemoveContainer" containerID="cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.669134 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37"} err="failed to get container status \"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37\": rpc error: code = NotFound desc = could not find container \"cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37\": container with ID starting with cf4b89ceeeed9dccf835138670b7818d31ceda30ff656ea45307fa63f6610a37 not found: ID does not exist" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.687318 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.694928 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.695450 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="init" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695467 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="init" Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.695487 4756 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-log" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695495 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-log" Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.695512 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ec7c91-a63c-45a9-97dc-f9c1666bdcce" containerName="nova-manage" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695520 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ec7c91-a63c-45a9-97dc-f9c1666bdcce" containerName="nova-manage" Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.695539 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="dnsmasq-dns" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695545 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="dnsmasq-dns" Sep 30 19:52:54 crc kubenswrapper[4756]: E0930 19:52:54.695565 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-api" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695572 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-api" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695806 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-log" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695842 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ec7c91-a63c-45a9-97dc-f9c1666bdcce" containerName="nova-manage" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695861 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" containerName="nova-api-api" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.695873 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="5585066a-57c7-4fd5-91b8-36af88ee6f9b" containerName="dnsmasq-dns" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.697113 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.699725 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.699943 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.708236 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.718353 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.797389 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5wjf\" (UniqueName: \"kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.797550 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.797609 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.797690 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.797886 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.798011 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.899316 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5wjf\" (UniqueName: \"kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.899610 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data\") pod 
\"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.899709 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.899801 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.899989 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.901500 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.902482 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.904162 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.908539 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.911509 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.930066 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " pod="openstack/nova-api-0" Sep 30 19:52:54 crc kubenswrapper[4756]: I0930 19:52:54.931225 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5wjf\" (UniqueName: \"kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf\") pod \"nova-api-0\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " 
pod="openstack/nova-api-0" Sep 30 19:52:55 crc kubenswrapper[4756]: I0930 19:52:55.022952 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:52:55 crc kubenswrapper[4756]: I0930 19:52:55.132824 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a645827a-f0f2-4cbd-8f0a-d1126fe95701" path="/var/lib/kubelet/pods/a645827a-f0f2-4cbd-8f0a-d1126fe95701/volumes" Sep 30 19:52:55 crc kubenswrapper[4756]: I0930 19:52:55.476159 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:52:55 crc kubenswrapper[4756]: E0930 19:52:55.477641 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:55 crc kubenswrapper[4756]: E0930 19:52:55.486887 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:55 crc kubenswrapper[4756]: E0930 19:52:55.490960 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:52:55 crc kubenswrapper[4756]: E0930 19:52:55.491026 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerName="nova-scheduler-scheduler" Sep 30 19:52:55 crc kubenswrapper[4756]: I0930 19:52:55.621799 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerStarted","Data":"0318ec90fdae1de3844c876368978de3d95b8f151ccc9cc6ea5b08c95f8d4641"} Sep 30 19:52:56 crc kubenswrapper[4756]: I0930 19:52:56.631671 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerStarted","Data":"e10cbb9a45161cb9ac5e6eb2582b06fee216894c3414ab0191425a8e6235e510"} Sep 30 19:52:56 crc kubenswrapper[4756]: I0930 19:52:56.632147 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerStarted","Data":"2c21c56307aa57c6e443391243b07699b843b3fabf15be8a802984834452c537"} Sep 30 19:52:56 crc kubenswrapper[4756]: I0930 19:52:56.682059 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.6820330119999998 podStartE2EDuration="2.682033012s" podCreationTimestamp="2025-09-30 19:52:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:56.663664563 +0000 UTC m=+1306.284598050" watchObservedRunningTime="2025-09-30 
19:52:56.682033012 +0000 UTC m=+1306.302966489"
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.527776 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.546892 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle\") pod \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") "
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.546943 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs\") pod \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") "
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.546986 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data\") pod \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") "
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.547061 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2rjp\" (UniqueName: \"kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp\") pod \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") "
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.547155 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs\") pod \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\" (UID: \"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d\") "
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.548370 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs" (OuterVolumeSpecName: "logs") pod "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" (UID: "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.553864 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp" (OuterVolumeSpecName: "kube-api-access-c2rjp") pod "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" (UID: "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d"). InnerVolumeSpecName "kube-api-access-c2rjp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.588601 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" (UID: "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d"). InnerVolumeSpecName "combined-ca-bundle".
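
The ExecSync errors at 19:52:55 are a readiness probe racing container shutdown: the probe command /usr/bin/pgrep -r DRST nova-scheduler (quoted verbatim in the records) can no longer be exec'd in a stopping container, so the probe errors and the pod is marked unready before it disappears. Conceptually an exec readiness probe is just "run the command; exit status 0 means ready", as in this Go sketch (runProbe is a hypothetical name, and the -r run-state filter needs a procps-ng pgrep that supports it):

package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

// runProbe runs a probe command with a timeout; a zero exit status means
// ready, anything else (non-zero exit, exec failure, timeout) means not ready.
func runProbe(ctx context.Context, argv []string) bool {
	cmd := exec.CommandContext(ctx, argv[0], argv[1:]...)
	return cmd.Run() == nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	ready := runProbe(ctx, []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"})
	fmt.Println("ready:", ready) // false on a host with no nova-scheduler process
}
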
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.594615 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data" (OuterVolumeSpecName: "config-data") pod "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" (UID: "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.606510 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" (UID: "4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.641043 4756 generic.go:334] "Generic (PLEG): container finished" podID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerID="6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d" exitCode=0 Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.641109 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.641138 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerDied","Data":"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d"} Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.641193 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d","Type":"ContainerDied","Data":"59fba15c97b7f164077e5065f9da0add5e1dc99c0e8881633a5955b491b9d655"} Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.641211 4756 scope.go:117] "RemoveContainer" containerID="6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.649141 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.649162 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.649171 4756 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.649181 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.649189 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2rjp\" (UniqueName: \"kubernetes.io/projected/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d-kube-api-access-c2rjp\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 
19:52:57.682152 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.686633 4756 scope.go:117] "RemoveContainer" containerID="63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.692089 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.715073 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:58 crc kubenswrapper[4756]: E0930 19:52:57.715576 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-log" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.715592 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-log" Sep 30 19:52:58 crc kubenswrapper[4756]: E0930 19:52:57.715626 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-metadata" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.715636 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-metadata" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.715876 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-metadata" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.715906 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" containerName="nova-metadata-log" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.717319 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.721505 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.722448 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.725943 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.726778 4756 scope.go:117] "RemoveContainer" containerID="6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d" Sep 30 19:52:58 crc kubenswrapper[4756]: E0930 19:52:57.727156 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d\": container with ID starting with 6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d not found: ID does not exist" containerID="6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.727195 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d"} err="failed to get container status \"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d\": rpc error: code = NotFound desc = could not find container \"6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d\": container with ID starting with 6d008c654436805bfaeae405e585f88aac07cfffcc03e4c98abff8f344b5757d not found: ID does not exist" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.727225 4756 scope.go:117] "RemoveContainer" containerID="63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4" Sep 30 19:52:58 crc kubenswrapper[4756]: E0930 19:52:57.728997 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4\": container with ID starting with 63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4 not found: ID does not exist" containerID="63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.729052 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4"} err="failed to get container status \"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4\": rpc error: code = NotFound desc = could not find container \"63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4\": container with ID starting with 63811f19f70f2db22b8c6db5cf3d5678c6c94e93d0629c39778c948e817682b4 not found: ID does not exist" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.750021 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln69m\" (UniqueName: \"kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.750063 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.750092 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.750223 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.750312 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.852724 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln69m\" (UniqueName: \"kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.852772 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.852821 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.852885 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.852939 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.853330 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " 
pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.856863 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.857012 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.858163 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:57.879777 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln69m\" (UniqueName: \"kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m\") pod \"nova-metadata-0\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:58.039868 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:58.470170 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:52:58 crc kubenswrapper[4756]: I0930 19:52:58.651510 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerStarted","Data":"46ebb7c52a491c73ac8d6f8d7d6acb550544551c6bf2e2051405fb8444e43678"} Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.138236 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d" path="/var/lib/kubelet/pods/4f6756ce-2d11-45b8-ae0d-3e1bf4ef589d/volumes" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.661655 4756 generic.go:334] "Generic (PLEG): container finished" podID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerID="a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194" exitCode=0 Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.661747 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2c09b969-b4ff-472d-a938-80dcdbc49fa0","Type":"ContainerDied","Data":"a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194"} Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.662197 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2c09b969-b4ff-472d-a938-80dcdbc49fa0","Type":"ContainerDied","Data":"6f834961cdf36e4c7ca497383da013cc9f13744208b63d3bf35649599dccbd42"} Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.662237 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f834961cdf36e4c7ca497383da013cc9f13744208b63d3bf35649599dccbd42" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.663756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerStarted","Data":"493c10b1ea169f530bd9a7626736c31aefe74b4181647d0bdc31bf4a5ec5cc4f"} Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.663812 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerStarted","Data":"5f2536bd80f2c6c079d1749352246b7217d86ded3227c9601a2ad2db0771bef2"} Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.686313 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.686288321 podStartE2EDuration="2.686288321s" podCreationTimestamp="2025-09-30 19:52:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:52:59.683851277 +0000 UTC m=+1309.304784774" watchObservedRunningTime="2025-09-30 19:52:59.686288321 +0000 UTC m=+1309.307221838" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.700002 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.894041 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data\") pod \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.894149 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle\") pod \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.894206 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6frg\" (UniqueName: \"kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg\") pod \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\" (UID: \"2c09b969-b4ff-472d-a938-80dcdbc49fa0\") " Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.905272 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg" (OuterVolumeSpecName: "kube-api-access-l6frg") pod "2c09b969-b4ff-472d-a938-80dcdbc49fa0" (UID: "2c09b969-b4ff-472d-a938-80dcdbc49fa0"). InnerVolumeSpecName "kube-api-access-l6frg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.925646 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data" (OuterVolumeSpecName: "config-data") pod "2c09b969-b4ff-472d-a938-80dcdbc49fa0" (UID: "2c09b969-b4ff-472d-a938-80dcdbc49fa0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.959689 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c09b969-b4ff-472d-a938-80dcdbc49fa0" (UID: "2c09b969-b4ff-472d-a938-80dcdbc49fa0"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.997169 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.997213 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c09b969-b4ff-472d-a938-80dcdbc49fa0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:52:59 crc kubenswrapper[4756]: I0930 19:52:59.997230 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6frg\" (UniqueName: \"kubernetes.io/projected/2c09b969-b4ff-472d-a938-80dcdbc49fa0-kube-api-access-l6frg\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.674855 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.724872 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.740083 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.752044 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:00 crc kubenswrapper[4756]: E0930 19:53:00.752500 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerName="nova-scheduler-scheduler" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.752514 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerName="nova-scheduler-scheduler" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.752775 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" containerName="nova-scheduler-scheduler" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.753577 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.760126 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.771611 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.822496 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.822928 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.823076 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxxt9\" (UniqueName: \"kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.924160 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxxt9\" (UniqueName: \"kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.924270 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.924298 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.929264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.934933 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:00 crc kubenswrapper[4756]: I0930 19:53:00.942566 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxxt9\" (UniqueName: 
\"kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9\") pod \"nova-scheduler-0\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " pod="openstack/nova-scheduler-0" Sep 30 19:53:01 crc kubenswrapper[4756]: I0930 19:53:01.116504 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:53:01 crc kubenswrapper[4756]: I0930 19:53:01.126377 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c09b969-b4ff-472d-a938-80dcdbc49fa0" path="/var/lib/kubelet/pods/2c09b969-b4ff-472d-a938-80dcdbc49fa0/volumes" Sep 30 19:53:01 crc kubenswrapper[4756]: I0930 19:53:01.593773 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:01 crc kubenswrapper[4756]: W0930 19:53:01.594797 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcff112d3_3898_41c9_aab6_ed2e2a57a484.slice/crio-f12feea9e30642d994c691a0151a4c3fcfac76032b7e80ed89a0189682d60ea7 WatchSource:0}: Error finding container f12feea9e30642d994c691a0151a4c3fcfac76032b7e80ed89a0189682d60ea7: Status 404 returned error can't find the container with id f12feea9e30642d994c691a0151a4c3fcfac76032b7e80ed89a0189682d60ea7 Sep 30 19:53:01 crc kubenswrapper[4756]: I0930 19:53:01.685843 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cff112d3-3898-41c9-aab6-ed2e2a57a484","Type":"ContainerStarted","Data":"f12feea9e30642d994c691a0151a4c3fcfac76032b7e80ed89a0189682d60ea7"} Sep 30 19:53:02 crc kubenswrapper[4756]: I0930 19:53:02.705668 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cff112d3-3898-41c9-aab6-ed2e2a57a484","Type":"ContainerStarted","Data":"bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1"} Sep 30 19:53:02 crc kubenswrapper[4756]: I0930 19:53:02.734135 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.7341202559999997 podStartE2EDuration="2.734120256s" podCreationTimestamp="2025-09-30 19:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:53:02.729162386 +0000 UTC m=+1312.350095893" watchObservedRunningTime="2025-09-30 19:53:02.734120256 +0000 UTC m=+1312.355053733" Sep 30 19:53:03 crc kubenswrapper[4756]: I0930 19:53:03.040503 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 19:53:03 crc kubenswrapper[4756]: I0930 19:53:03.040644 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 19:53:05 crc kubenswrapper[4756]: I0930 19:53:05.024045 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:53:05 crc kubenswrapper[4756]: I0930 19:53:05.024420 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 19:53:06 crc kubenswrapper[4756]: I0930 19:53:06.040590 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 19:53:06 crc kubenswrapper[4756]: I0930 
19:53:06.040603 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 19:53:06 crc kubenswrapper[4756]: I0930 19:53:06.116698 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 19:53:08 crc kubenswrapper[4756]: I0930 19:53:08.040636 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 19:53:08 crc kubenswrapper[4756]: I0930 19:53:08.040991 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 19:53:09 crc kubenswrapper[4756]: I0930 19:53:09.060585 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 19:53:09 crc kubenswrapper[4756]: I0930 19:53:09.060621 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 19:53:11 crc kubenswrapper[4756]: I0930 19:53:11.152205 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 19:53:11 crc kubenswrapper[4756]: I0930 19:53:11.180818 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 19:53:11 crc kubenswrapper[4756]: I0930 19:53:11.539269 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:53:11 crc kubenswrapper[4756]: I0930 19:53:11.539334 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:53:11 crc kubenswrapper[4756]: I0930 19:53:11.842600 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 19:53:14 crc kubenswrapper[4756]: I0930 19:53:14.139500 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.036224 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.036783 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.037246 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.037290 4756 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.044883 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 19:53:15 crc kubenswrapper[4756]: I0930 19:53:15.053981 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 19:53:18 crc kubenswrapper[4756]: I0930 19:53:18.046384 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 19:53:18 crc kubenswrapper[4756]: I0930 19:53:18.047709 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 19:53:18 crc kubenswrapper[4756]: I0930 19:53:18.052340 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 19:53:18 crc kubenswrapper[4756]: I0930 19:53:18.056458 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.235414 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.245215 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="cinder-scheduler" containerID="cri-o://3816f49e158334388e40eecbe485b69612c7263f206e5f76f6bf16f35cb72753" gracePeriod=30 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.245686 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="probe" containerID="cri-o://18b54d61471a434dd27220be8222ac734c16562af18d9cb5d87c937b4bbb70ab" gracePeriod=30 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.341715 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.341987 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" containerName="openstackclient" containerID="cri-o://3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c" gracePeriod=2 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.456607 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.474167 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.499711 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.499926 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api-log" containerID="cri-o://34a9a3332fdf0e5c76c740afbc2528fa100673eb4984317f115c8a7e501cf8ba" gracePeriod=30 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.500304 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api" 
containerID="cri-o://6a012f6649739f7a3c55f7eb5ccd4842f6f7de987bfd7b459d8bead1ed555778" gracePeriod=30 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.518309 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:39 crc kubenswrapper[4756]: E0930 19:53:39.518733 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" containerName="openstackclient" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.518748 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" containerName="openstackclient" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.518926 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" containerName="openstackclient" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.520120 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.545998 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.562568 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj44b\" (UniqueName: \"kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b\") pod \"placementa4e2-account-delete-86w5v\" (UID: \"859e275e-1d03-482b-aff6-8df9d80957fe\") " pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.593406 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.594515 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.620189 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.661704 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.664621 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj44b\" (UniqueName: \"kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b\") pod \"placementa4e2-account-delete-86w5v\" (UID: \"859e275e-1d03-482b-aff6-8df9d80957fe\") " pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:39 crc kubenswrapper[4756]: E0930 19:53:39.665291 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 19:53:39 crc kubenswrapper[4756]: E0930 19:53:39.665338 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data podName:c3b1ef6d-6617-4d15-8709-a7623e75faa4 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:40.165320595 +0000 UTC m=+1349.786254072 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data") pod "rabbitmq-server-0" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4") : configmap "rabbitmq-config-data" not found Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.693100 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj44b\" (UniqueName: \"kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b\") pod \"placementa4e2-account-delete-86w5v\" (UID: \"859e275e-1d03-482b-aff6-8df9d80957fe\") " pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.704001 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.704343 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.704868 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="openstack-network-exporter" containerID="cri-o://6767daa671c57644b127486af8258b0a0685ce6fa8c75b10d7e4ec93b1682e95" gracePeriod=300 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.753347 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.766005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvm8z\" (UniqueName: \"kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z\") pod \"novacell0073f-account-delete-mnvpw\" (UID: \"7dff94a7-1c27-4324-8e94-5a095f79eb9a\") " pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.835573 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.837100 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.849374 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.859917 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.875196 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5skg7\" (UniqueName: \"kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7\") pod \"novaapic915-account-delete-sw2ck\" (UID: \"13a44baa-047c-44d4-958e-c0d219d4f5a6\") " pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.875274 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvm8z\" (UniqueName: \"kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z\") pod \"novacell0073f-account-delete-mnvpw\" (UID: \"7dff94a7-1c27-4324-8e94-5a095f79eb9a\") " pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.901936 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvm8z\" (UniqueName: \"kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z\") pod \"novacell0073f-account-delete-mnvpw\" (UID: \"7dff94a7-1c27-4324-8e94-5a095f79eb9a\") " pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.923104 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.952319 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.953382 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="openstack-network-exporter" containerID="cri-o://1e54723557bbe1ca80eed69c4f2a6744b98440dd8b61a4a9acaa14272039ec98" gracePeriod=300 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.970935 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="ovsdbserver-nb" containerID="cri-o://80a97405b4312c52e3a31eee94edf96e1e860adff64ef35aedcd06d9bb1f2983" gracePeriod=300 Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.986902 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5skg7\" (UniqueName: \"kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7\") pod \"novaapic915-account-delete-sw2ck\" (UID: \"13a44baa-047c-44d4-958e-c0d219d4f5a6\") " pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:39 crc kubenswrapper[4756]: I0930 19:53:39.986956 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqx8v\" (UniqueName: \"kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v\") pod \"glancef07e-account-delete-lzl2g\" (UID: \"e3956c8b-1699-4cc4-8a97-b3ed35730374\") " pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.015614 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5skg7\" (UniqueName: \"kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7\") pod \"novaapic915-account-delete-sw2ck\" (UID: 
\"13a44baa-047c-44d4-958e-c0d219d4f5a6\") " pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.039499 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-w6sv7"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.070897 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-w6sv7"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.078860 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="ovsdbserver-sb" containerID="cri-o://b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" gracePeriod=300 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.088948 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqx8v\" (UniqueName: \"kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v\") pod \"glancef07e-account-delete-lzl2g\" (UID: \"e3956c8b-1699-4cc4-8a97-b3ed35730374\") " pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.089517 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.089827 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd" containerID="cri-o://665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" gracePeriod=30 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.090224 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="openstack-network-exporter" containerID="cri-o://67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35" gracePeriod=30 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.097686 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.111726 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-4kg8f"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.124453 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-4kg8f"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.130318 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.150146 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkwpj"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.152328 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkwpj"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.154984 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqx8v\" (UniqueName: \"kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v\") pod \"glancef07e-account-delete-lzl2g\" (UID: \"e3956c8b-1699-4cc4-8a97-b3ed35730374\") " pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.161923 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.193766 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.193821 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data podName:c3b1ef6d-6617-4d15-8709-a7623e75faa4 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:41.193808207 +0000 UTC m=+1350.814741684 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data") pod "rabbitmq-server-0" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4") : configmap "rabbitmq-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.260762 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87e13633-3439-4184-a1f8-1afbbe54883a/ovsdbserver-nb/0.log" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.260806 4756 generic.go:334] "Generic (PLEG): container finished" podID="87e13633-3439-4184-a1f8-1afbbe54883a" containerID="6767daa671c57644b127486af8258b0a0685ce6fa8c75b10d7e4ec93b1682e95" exitCode=2 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.260822 4756 generic.go:334] "Generic (PLEG): container finished" podID="87e13633-3439-4184-a1f8-1afbbe54883a" containerID="80a97405b4312c52e3a31eee94edf96e1e860adff64ef35aedcd06d9bb1f2983" exitCode=143 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.260875 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerDied","Data":"6767daa671c57644b127486af8258b0a0685ce6fa8c75b10d7e4ec93b1682e95"} Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.260900 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerDied","Data":"80a97405b4312c52e3a31eee94edf96e1e860adff64ef35aedcd06d9bb1f2983"} Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.294107 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.294183 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data podName:3bacc7b1-00f4-4489-9c0b-fa038cc443d6 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:40.794163294 +0000 UTC m=+1350.415096771 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data") pod "rabbitmq-cell1-server-0" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.322146 4756 generic.go:334] "Generic (PLEG): container finished" podID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerID="1e54723557bbe1ca80eed69c4f2a6744b98440dd8b61a4a9acaa14272039ec98" exitCode=2 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.322562 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfe78e50-0ef5-42f5-b989-39beebdd0d7c","Type":"ContainerDied","Data":"1e54723557bbe1ca80eed69c4f2a6744b98440dd8b61a4a9acaa14272039ec98"} Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.363948 4756 generic.go:334] "Generic (PLEG): container finished" podID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerID="34a9a3332fdf0e5c76c740afbc2528fa100673eb4984317f115c8a7e501cf8ba" exitCode=143 Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.363989 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerDied","Data":"34a9a3332fdf0e5c76c740afbc2528fa100673eb4984317f115c8a7e501cf8ba"} Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.384286 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-mbntt"] Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.435553 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7 is running failed: container process not found" containerID="b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.436059 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7 is running failed: container process not found" containerID="b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.436886 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7 is running failed: container process not found" containerID="b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.436917 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="ovsdbserver-sb" Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.438461 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-mbntt"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.456628 4756 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-8hkvg"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.477752 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-8hkvg"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.671279 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-j57gg"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.770968 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-j57gg"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.811544 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.821630 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: E0930 19:53:40.821704 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data podName:3bacc7b1-00f4-4489-9c0b-fa038cc443d6 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:41.821686692 +0000 UTC m=+1351.442620169 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data") pod "rabbitmq-cell1-server-0" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.921895 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-pljns"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.952715 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-pljns"] Sep 30 19:53:40 crc kubenswrapper[4756]: I0930 19:53:40.983812 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.064358 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.064804 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-tnm2n" podUID="9e9466ec-a908-431a-9c47-fda720095cd8" containerName="openstack-network-exporter" containerID="cri-o://a689bb821757f51e26e8f98c83247791b78940eeab2a2b87776e95ccb89ff5af" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.089894 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.100180 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.100719 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6bfdd55b6d-fqgk9" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-log" containerID="cri-o://a02c15c325b692d99424e55c3116131ba5bc7283ac4bd35486df44c56fb3a3f2" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.100836 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6bfdd55b6d-fqgk9" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-api" 
containerID="cri-o://0a44e61d030a7208b33d6a4a2edac3b0fbac211badfde73f8a2bb902741ee1c5" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.238899 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.238953 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data podName:c3b1ef6d-6617-4d15-8709-a7623e75faa4 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:43.238937254 +0000 UTC m=+1352.859870731 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data") pod "rabbitmq-server-0" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4") : configmap "rabbitmq-config-data" not found Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.247185 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f0f53de-f545-431e-9901-e45eedc73bf9" path="/var/lib/kubelet/pods/0f0f53de-f545-431e-9901-e45eedc73bf9/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.250047 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d1a09a3-9f2d-41ec-a2de-009e4835154e" path="/var/lib/kubelet/pods/2d1a09a3-9f2d-41ec-a2de-009e4835154e/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.250722 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41ec7c91-a63c-45a9-97dc-f9c1666bdcce" path="/var/lib/kubelet/pods/41ec7c91-a63c-45a9-97dc-f9c1666bdcce/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.251182 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99d41b4c-92d7-43f2-b6d1-044876e9a8ea" path="/var/lib/kubelet/pods/99d41b4c-92d7-43f2-b6d1-044876e9a8ea/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.268825 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c" path="/var/lib/kubelet/pods/c83d1164-f7e4-4ad5-a81e-1c5fa5d1486c/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.269614 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f08a96a9-79ca-4c30-b158-273876fe89b0" path="/var/lib/kubelet/pods/f08a96a9-79ca-4c30-b158-273876fe89b0/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.270190 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2" path="/var/lib/kubelet/pods/fb5b7a75-2b20-4dc3-bd63-e29dbb6cdba2/volumes" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.271082 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-qf8st"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275247 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-qf8st"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275281 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275301 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275311 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275323 4756 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275335 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275347 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a4e2-account-create-q5qj9"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275357 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a4e2-account-create-q5qj9"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275367 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-wvmsq"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275774 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log" containerID="cri-o://5f2536bd80f2c6c079d1749352246b7217d86ded3227c9601a2ad2db0771bef2" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275912 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="dnsmasq-dns" containerID="cri-o://51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba" gracePeriod=10 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.275983 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" containerID="cri-o://493c10b1ea169f530bd9a7626736c31aefe74b4181647d0bdc31bf4a5ec5cc4f" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.313657 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-wvmsq"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.357664 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87e13633-3439-4184-a1f8-1afbbe54883a/ovsdbserver-nb/0.log" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.357910 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358222 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358617 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-server" containerID="cri-o://2ded9539aabf2d7ed41a0b6709054bd69836dd45212e96330a8ced4a80d659c3" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358702 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="swift-recon-cron" containerID="cri-o://920c7a436f02252c0b5d507de8037ccd6743d1c2a255a147a18b894f28b926a4" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358737 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="rsync" containerID="cri-o://762a83f21b826aa3eb53b3ebd785e06362136c9dadaba330d0843609fc9c2284" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358766 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-expirer" containerID="cri-o://51f94be61ce4fe7988183466f20b547fdd3570bb129d81fb1a0d14b57bcbd868" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358794 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-updater" containerID="cri-o://c7c8e729cceb6e35134441b29a4688dd7e5d7409c1ff4e605c6736fd89e5f899" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358823 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-auditor" containerID="cri-o://5434ba65bcd1a16d1031ac5831c922a2bfb696f3b7f8c608c5e3fbd5cff3290f" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358850 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-replicator" containerID="cri-o://c9e0fb72308640c83674dc311f125ea56a518ff2b747a1f3595f7eb4f02f32bd" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358888 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-server" containerID="cri-o://9a23a18c16fa569d73aa74575f47acbf5abd74569f207256e830e5ae1207a4c1" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358917 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-updater" containerID="cri-o://d845f4ce57e99f7f80a72593e5611ab764740d01bc79f238f2095047d2ba9939" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358951 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" 
podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-auditor" containerID="cri-o://c1034fd69491ceca2b5a5e3609ed94e95183c5cd8752878c0eb7d55136e8ac9b" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.358982 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-replicator" containerID="cri-o://9d88cf8ad00002797476e74b41a910b1b6ec952d10c00335bb0abfc6bd17fddd" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.359011 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-server" containerID="cri-o://db446a0e9626857a987217e5f70793bfbb0795d88490d45a933892f52b588044" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.359039 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-reaper" containerID="cri-o://4b05a265d73c3a7ceb454829ebf559c526823bafbc3ff9d5547722704959942d" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.359068 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-auditor" containerID="cri-o://28e0221dbc10ee875c0d1d749687dc2243eba9bd6154bd2190340276bba9e102" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.359111 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-replicator" containerID="cri-o://e54877dad5935cd8933ec14fc3b8e4b0d87beeda887453f4702efc3c346c99e8" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.384130 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.384346 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerName="nova-scheduler-scheduler" containerID="cri-o://bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.419176 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.419517 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-log" containerID="cri-o://184b0bb964b5d979736c25d5b822cdec9e2f47d28b7b5da07359b35546a67d3b" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.419922 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-httpd" containerID="cri-o://a96ca6143ea765730fa414e7b169828518681d4fa546277df96764fd20203c32" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443441 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj58x\" (UniqueName: 
\"kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443545 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443568 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443604 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443627 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443662 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443734 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.443770 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle\") pod \"87e13633-3439-4184-a1f8-1afbbe54883a\" (UID: \"87e13633-3439-4184-a1f8-1afbbe54883a\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.446811 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts" (OuterVolumeSpecName: "scripts") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.447431 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config" (OuterVolumeSpecName: "config") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.447955 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.458901 4756 generic.go:334] "Generic (PLEG): container finished" podID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerID="a02c15c325b692d99424e55c3116131ba5bc7283ac4bd35486df44c56fb3a3f2" exitCode=143 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.458955 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerDied","Data":"a02c15c325b692d99424e55c3116131ba5bc7283ac4bd35486df44c56fb3a3f2"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.470547 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x" (OuterVolumeSpecName: "kube-api-access-vj58x") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "kube-api-access-vj58x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.484492 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.485241 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tnm2n_9e9466ec-a908-431a-9c47-fda720095cd8/openstack-network-exporter/0.log" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.485277 4756 generic.go:334] "Generic (PLEG): container finished" podID="9e9466ec-a908-431a-9c47-fda720095cd8" containerID="a689bb821757f51e26e8f98c83247791b78940eeab2a2b87776e95ccb89ff5af" exitCode=2 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.485323 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tnm2n" event={"ID":"9e9466ec-a908-431a-9c47-fda720095cd8","Type":"ContainerDied","Data":"a689bb821757f51e26e8f98c83247791b78940eeab2a2b87776e95ccb89ff5af"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.493301 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.493665 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.494941 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-log" containerID="cri-o://2c21c56307aa57c6e443391243b07699b843b3fabf15be8a802984834452c537" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.495207 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.495544 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-api" containerID="cri-o://e10cbb9a45161cb9ac5e6eb2582b06fee216894c3414ab0191425a8e6235e510" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.500300 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.500334 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.503151 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementa4e2-account-delete-86w5v" 
event={"ID":"859e275e-1d03-482b-aff6-8df9d80957fe","Type":"ContainerStarted","Data":"8d8722ed40b905000d9d217044faffa3c95adddef11627a27463d44076ca7163"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.504515 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bfe78e50-0ef5-42f5-b989-39beebdd0d7c/ovsdbserver-sb/0.log" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.504586 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.513788 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-t8wq8"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.514793 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0073f-account-delete-mnvpw" event={"ID":"7dff94a7-1c27-4324-8e94-5a095f79eb9a","Type":"ContainerStarted","Data":"cd049f406cf7955336fe29a5a15537a2d77c78e871e9634bac31eb19b62d96c6"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.526112 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-t8wq8"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.537644 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87e13633-3439-4184-a1f8-1afbbe54883a/ovsdbserver-nb/0.log" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.537715 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87e13633-3439-4184-a1f8-1afbbe54883a","Type":"ContainerDied","Data":"bc44bc513fdd73eee4a5df8aee511b36430a81186729183142b396ffd487277b"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.537750 4756 scope.go:117] "RemoveContainer" containerID="6767daa671c57644b127486af8258b0a0685ce6fa8c75b10d7e4ec93b1682e95" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.537877 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.538809 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.538836 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.538862 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.539293 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.539331 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea" gracePeriod=600 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.548704 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.548815 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.548871 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.548921 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.548937 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8hkd\" (UniqueName: \"kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd\") pod 
\"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.549417 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.549496 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.549521 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir\") pod \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\" (UID: \"bfe78e50-0ef5-42f5-b989-39beebdd0d7c\") " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.550006 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj58x\" (UniqueName: \"kubernetes.io/projected/87e13633-3439-4184-a1f8-1afbbe54883a-kube-api-access-vj58x\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.550018 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.550036 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.550046 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.550054 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87e13633-3439-4184-a1f8-1afbbe54883a-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.556341 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.556690 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config" (OuterVolumeSpecName: "config") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.564063 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts" (OuterVolumeSpecName: "scripts") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.585510 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd" (OuterVolumeSpecName: "kube-api-access-j8hkd") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "kube-api-access-j8hkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.593903 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.594235 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-log" containerID="cri-o://910743b27955784ee4da86c480d093283c52757852937aeb938dde4695e57b83" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.594819 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-httpd" containerID="cri-o://af672176e9d6745734bae103367eaab92c17b4c3f23ec9cc9c702908a2e393cc" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.598552 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.609044 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.616921 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bf36-account-create-6xdsk"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.637389 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-bf36-account-create-6xdsk"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.638573 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bfe78e50-0ef5-42f5-b989-39beebdd0d7c/ovsdbserver-sb/0.log" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.638610 4756 generic.go:334] "Generic (PLEG): container finished" podID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerID="b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" exitCode=143 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.638651 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfe78e50-0ef5-42f5-b989-39beebdd0d7c","Type":"ContainerDied","Data":"b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.638736 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.648665 4756 generic.go:334] "Generic (PLEG): container finished" podID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerID="67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35" exitCode=2 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.648702 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerDied","Data":"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35"} Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.649585 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-zzp77"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657471 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8hkd\" (UniqueName: \"kubernetes.io/projected/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-kube-api-access-j8hkd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657506 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657517 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657527 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657534 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.657543 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.658530 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.660730 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-zzp77"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.676064 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-073f-account-create-45blc"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.703845 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.767702 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.767729 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.772384 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.773457 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-073f-account-create-45blc"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.792868 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.794184 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="galera" containerID="cri-o://c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.818127 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.819559 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "bfe78e50-0ef5-42f5-b989-39beebdd0d7c" (UID: "bfe78e50-0ef5-42f5-b989-39beebdd0d7c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.835177 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.841220 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.858884 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.861610 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wrpgb"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.869332 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.869349 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.869358 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfe78e50-0ef5-42f5-b989-39beebdd0d7c-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.869366 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.869541 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.869583 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data podName:3bacc7b1-00f4-4489-9c0b-fa038cc443d6 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:43.869570539 +0000 UTC m=+1353.490504016 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data") pod "rabbitmq-cell1-server-0" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.874588 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 19:53:41 crc kubenswrapper[4756]: E0930 19:53:41.874631 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="galera" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.879452 4756 scope.go:117] "RemoveContainer" containerID="80a97405b4312c52e3a31eee94edf96e1e860adff64ef35aedcd06d9bb1f2983" Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.879602 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wrpgb"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.889552 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.905991 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c915-account-create-77pfr"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.941291 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.941807 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d4df5d75-8jctn" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-api" containerID="cri-o://900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.941934 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d4df5d75-8jctn" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-httpd" containerID="cri-o://9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4" gracePeriod=30 Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.958499 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c915-account-create-77pfr"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.968847 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2b0c-account-create-vvcgf"] Sep 30 19:53:41 crc kubenswrapper[4756]: I0930 19:53:41.986228 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2b0c-account-create-vvcgf"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.021352 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-blvtr"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.036039 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-blvtr"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.053773 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/cinder-db-create-89jrd"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.061794 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-e450-account-create-m4pnm"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.073958 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-89jrd"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.087407 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-e450-account-create-m4pnm"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.091472 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "87e13633-3439-4184-a1f8-1afbbe54883a" (UID: "87e13633-3439-4184-a1f8-1afbbe54883a"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.096010 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-qw4hr"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.103689 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-qw4hr"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.109955 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.142475 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-2hkb2"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.147806 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-f07e-account-create-ng8hh"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.157085 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-2hkb2"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.171646 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-cea4-account-create-kkbq5"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.179810 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-f07e-account-create-ng8hh"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.183322 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87e13633-3439-4184-a1f8-1afbbe54883a-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.183346 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" containerID="cri-o://291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" gracePeriod=29 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.196533 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-cea4-account-create-kkbq5"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.211901 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.230343 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.230625 4756 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-httpd" containerID="cri-o://2ab71e28b3fab8dbf1f220c5eb4490f4cf686ee22cd811d509ce95839b4580b6" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.231032 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-server" containerID="cri-o://8d013d9f6a59da2bcacdb9355cb603a5a104cc57783d02a5fc8770f2294bb8f0" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.249888 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.250130 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://10c6b1bb1f7f4de1bc5c4aca7ca3dd73b0eea0fe2e2a9e20084f2def6aa17018" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.292902 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.293169 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener-log" containerID="cri-o://9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.293555 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener" containerID="cri-o://5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.326895 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="rabbitmq" containerID="cri-o://d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa" gracePeriod=604800 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.337951 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.338143 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-bbf58c-78vxt" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker-log" containerID="cri-o://baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.338243 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-bbf58c-78vxt" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker" containerID="cri-o://46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.344093 4756 scope.go:117] "RemoveContainer" containerID="1e54723557bbe1ca80eed69c4f2a6744b98440dd8b61a4a9acaa14272039ec98" Sep 30 19:53:42 crc kubenswrapper[4756]: 
I0930 19:53:42.356943 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.398476 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.398738 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-684ccb4bd6-x97br" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api-log" containerID="cri-o://b64e18bd6132f9cc117785155229a1d2aa54ec5983a3ee45e5e205868af69f65" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.398966 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tnm2n_9e9466ec-a908-431a-9c47-fda720095cd8/openstack-network-exporter/0.log" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.399022 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-684ccb4bd6-x97br" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api" containerID="cri-o://207904952f6540ef119af443514c1298c2cf8341c4b2adc7b85d185c51cdf006" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.399029 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tnm2n" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.411652 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.411887 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" containerName="nova-cell0-conductor-conductor" containerID="cri-o://8dbe31d5b2222309a75d567a85830fd5e0fbe8a178b7a9f99d3211c5bbf82f1c" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.423324 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mzmtl"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.430459 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mzmtl"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.435877 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4blkh"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.441102 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.441293 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="164b639f-c027-4486-a298-06e9510e14ac" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" gracePeriod=30 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.452828 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.454802 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.454895 4756 scope.go:117] "RemoveContainer" containerID="b97d2d37d038e0a3a68f083bf479bac02d181211d1a60c673e2f3e39c0ffddc7" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.481232 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4blkh"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.489244 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490094 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww2jq\" (UniqueName: \"kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490173 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490206 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490252 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490268 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490338 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir\") pod \"9e9466ec-a908-431a-9c47-fda720095cd8\" (UID: \"9e9466ec-a908-431a-9c47-fda720095cd8\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.490886 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.491354 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config" (OuterVolumeSpecName: "config") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.496528 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.500450 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.505108 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.512747 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq" (OuterVolumeSpecName: "kube-api-access-ww2jq") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "kube-api-access-ww2jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.512806 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.516769 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.561849 4756 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Sep 30 19:53:42 crc kubenswrapper[4756]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 19:53:42 crc kubenswrapper[4756]: + source /usr/local/bin/container-scripts/functions Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNBridge=br-int Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNRemote=tcp:localhost:6642 Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNEncapType=geneve Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNAvailabilityZones= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ EnableChassisAsGateway=true Sep 30 19:53:42 crc kubenswrapper[4756]: ++ PhysicalNetworks= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNHostName= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 19:53:42 crc kubenswrapper[4756]: ++ ovs_dir=/var/lib/openvswitch Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 19:53:42 crc kubenswrapper[4756]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + cleanup_ovsdb_server_semaphore Sep 30 19:53:42 crc kubenswrapper[4756]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 19:53:42 crc kubenswrapper[4756]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-kmbtz" message=< Sep 30 19:53:42 crc kubenswrapper[4756]: Exiting ovsdb-server (5) [ OK ] Sep 30 19:53:42 crc kubenswrapper[4756]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 19:53:42 crc kubenswrapper[4756]: + source /usr/local/bin/container-scripts/functions Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNBridge=br-int Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNRemote=tcp:localhost:6642 Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNEncapType=geneve Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNAvailabilityZones= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ EnableChassisAsGateway=true Sep 30 19:53:42 crc kubenswrapper[4756]: ++ PhysicalNetworks= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNHostName= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 19:53:42 crc kubenswrapper[4756]: ++ ovs_dir=/var/lib/openvswitch Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 19:53:42 crc kubenswrapper[4756]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + cleanup_ovsdb_server_semaphore Sep 30 19:53:42 crc kubenswrapper[4756]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 19:53:42 crc kubenswrapper[4756]: > Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.562185 4756 kuberuntime_container.go:691] "PreStop hook failed" err=< Sep 30 19:53:42 crc kubenswrapper[4756]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 19:53:42 crc kubenswrapper[4756]: + source /usr/local/bin/container-scripts/functions Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNBridge=br-int Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNRemote=tcp:localhost:6642 Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNEncapType=geneve Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNAvailabilityZones= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ EnableChassisAsGateway=true Sep 30 19:53:42 crc kubenswrapper[4756]: ++ PhysicalNetworks= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ OVNHostName= Sep 30 19:53:42 crc kubenswrapper[4756]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 19:53:42 crc kubenswrapper[4756]: ++ ovs_dir=/var/lib/openvswitch Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 19:53:42 crc kubenswrapper[4756]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 19:53:42 crc kubenswrapper[4756]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + sleep 0.5 Sep 30 19:53:42 crc kubenswrapper[4756]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 19:53:42 crc kubenswrapper[4756]: + cleanup_ovsdb_server_semaphore Sep 30 19:53:42 crc kubenswrapper[4756]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 19:53:42 crc kubenswrapper[4756]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 19:53:42 crc kubenswrapper[4756]: > pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" containerID="cri-o://223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.562226 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" containerID="cri-o://223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" gracePeriod=29 Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.571477 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.583530 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592285 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592328 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592377 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58grk\" (UniqueName: \"kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592425 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592456 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc 
kubenswrapper[4756]: I0930 19:53:42.592490 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle\") pod \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592534 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config\") pod \"3ad8997f-b736-41c8-b087-d09b6316065d\" (UID: \"3ad8997f-b736-41c8-b087-d09b6316065d\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592642 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config\") pod \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592668 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret\") pod \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.592863 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8bkj\" (UniqueName: \"kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj\") pod \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\" (UID: \"3b333bad-e0f7-4b3a-b21d-7854ad7aafa6\") " Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.593236 4756 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovs-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.593251 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww2jq\" (UniqueName: \"kubernetes.io/projected/9e9466ec-a908-431a-9c47-fda720095cd8-kube-api-access-ww2jq\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.593261 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9466ec-a908-431a-9c47-fda720095cd8-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.593271 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9e9466ec-a908-431a-9c47-fda720095cd8-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.597811 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:42 crc kubenswrapper[4756]: E0930 19:53:42.597885 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" 
podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.604750 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk" (OuterVolumeSpecName: "kube-api-access-58grk") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "kube-api-access-58grk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.636344 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj" (OuterVolumeSpecName: "kube-api-access-w8bkj") pod "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" (UID: "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6"). InnerVolumeSpecName "kube-api-access-w8bkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.679611 4756 generic.go:334] "Generic (PLEG): container finished" podID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerID="184b0bb964b5d979736c25d5b822cdec9e2f47d28b7b5da07359b35546a67d3b" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.679717 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerDied","Data":"184b0bb964b5d979736c25d5b822cdec9e2f47d28b7b5da07359b35546a67d3b"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.685705 4756 generic.go:334] "Generic (PLEG): container finished" podID="859e275e-1d03-482b-aff6-8df9d80957fe" containerID="214d8b533d03867b293add42a85a43e01c2f7db0aa4bc6c0526e7fb93719b660" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.685862 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementa4e2-account-delete-86w5v" event={"ID":"859e275e-1d03-482b-aff6-8df9d80957fe","Type":"ContainerDied","Data":"214d8b533d03867b293add42a85a43e01c2f7db0aa4bc6c0526e7fb93719b660"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.695725 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8bkj\" (UniqueName: \"kubernetes.io/projected/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-kube-api-access-w8bkj\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.695923 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58grk\" (UniqueName: \"kubernetes.io/projected/3ad8997f-b736-41c8-b087-d09b6316065d-kube-api-access-58grk\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.696924 4756 generic.go:334] "Generic (PLEG): container finished" podID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerID="9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.701651 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerDied","Data":"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.708941 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerDied","Data":"910743b27955784ee4da86c480d093283c52757852937aeb938dde4695e57b83"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.708914 4756 generic.go:334] "Generic (PLEG): container finished" podID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerID="910743b27955784ee4da86c480d093283c52757852937aeb938dde4695e57b83" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726853 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="762a83f21b826aa3eb53b3ebd785e06362136c9dadaba330d0843609fc9c2284" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726883 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="51f94be61ce4fe7988183466f20b547fdd3570bb129d81fb1a0d14b57bcbd868" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726891 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="c7c8e729cceb6e35134441b29a4688dd7e5d7409c1ff4e605c6736fd89e5f899" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726898 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="5434ba65bcd1a16d1031ac5831c922a2bfb696f3b7f8c608c5e3fbd5cff3290f" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726904 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="c9e0fb72308640c83674dc311f125ea56a518ff2b747a1f3595f7eb4f02f32bd" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726910 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="9a23a18c16fa569d73aa74575f47acbf5abd74569f207256e830e5ae1207a4c1" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726916 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="d845f4ce57e99f7f80a72593e5611ab764740d01bc79f238f2095047d2ba9939" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726922 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="c1034fd69491ceca2b5a5e3609ed94e95183c5cd8752878c0eb7d55136e8ac9b" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726928 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="9d88cf8ad00002797476e74b41a910b1b6ec952d10c00335bb0abfc6bd17fddd" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726934 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="db446a0e9626857a987217e5f70793bfbb0795d88490d45a933892f52b588044" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726941 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="4b05a265d73c3a7ceb454829ebf559c526823bafbc3ff9d5547722704959942d" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726948 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="28e0221dbc10ee875c0d1d749687dc2243eba9bd6154bd2190340276bba9e102" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726954 4756 
generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="e54877dad5935cd8933ec14fc3b8e4b0d87beeda887453f4702efc3c346c99e8" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.726960 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="2ded9539aabf2d7ed41a0b6709054bd69836dd45212e96330a8ced4a80d659c3" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727248 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"762a83f21b826aa3eb53b3ebd785e06362136c9dadaba330d0843609fc9c2284"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727279 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"51f94be61ce4fe7988183466f20b547fdd3570bb129d81fb1a0d14b57bcbd868"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727293 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"c7c8e729cceb6e35134441b29a4688dd7e5d7409c1ff4e605c6736fd89e5f899"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727325 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"5434ba65bcd1a16d1031ac5831c922a2bfb696f3b7f8c608c5e3fbd5cff3290f"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727336 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"c9e0fb72308640c83674dc311f125ea56a518ff2b747a1f3595f7eb4f02f32bd"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727344 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"9a23a18c16fa569d73aa74575f47acbf5abd74569f207256e830e5ae1207a4c1"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727352 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"d845f4ce57e99f7f80a72593e5611ab764740d01bc79f238f2095047d2ba9939"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727442 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"c1034fd69491ceca2b5a5e3609ed94e95183c5cd8752878c0eb7d55136e8ac9b"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727459 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"9d88cf8ad00002797476e74b41a910b1b6ec952d10c00335bb0abfc6bd17fddd"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"db446a0e9626857a987217e5f70793bfbb0795d88490d45a933892f52b588044"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727480 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"4b05a265d73c3a7ceb454829ebf559c526823bafbc3ff9d5547722704959942d"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727490 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"28e0221dbc10ee875c0d1d749687dc2243eba9bd6154bd2190340276bba9e102"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727500 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"e54877dad5935cd8933ec14fc3b8e4b0d87beeda887453f4702efc3c346c99e8"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.727509 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"2ded9539aabf2d7ed41a0b6709054bd69836dd45212e96330a8ced4a80d659c3"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.734530 4756 generic.go:334] "Generic (PLEG): container finished" podID="7dff94a7-1c27-4324-8e94-5a095f79eb9a" containerID="518a7a9f5453f1009879b94cd4d7635553563523922db8f285224d9c9a603800" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.734589 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0073f-account-delete-mnvpw" event={"ID":"7dff94a7-1c27-4324-8e94-5a095f79eb9a","Type":"ContainerDied","Data":"518a7a9f5453f1009879b94cd4d7635553563523922db8f285224d9c9a603800"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.742548 4756 generic.go:334] "Generic (PLEG): container finished" podID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerID="18b54d61471a434dd27220be8222ac734c16562af18d9cb5d87c937b4bbb70ab" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.742584 4756 generic.go:334] "Generic (PLEG): container finished" podID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerID="3816f49e158334388e40eecbe485b69612c7263f206e5f76f6bf16f35cb72753" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.742638 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerDied","Data":"18b54d61471a434dd27220be8222ac734c16562af18d9cb5d87c937b4bbb70ab"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.742710 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerDied","Data":"3816f49e158334388e40eecbe485b69612c7263f206e5f76f6bf16f35cb72753"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.746239 4756 generic.go:334] "Generic (PLEG): container finished" podID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerID="9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.746291 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerDied","Data":"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.750667 4756 generic.go:334] "Generic (PLEG): container finished" podID="3c14829a-8f5d-490d-a922-719d81da47a4" 
containerID="baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.750735 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerDied","Data":"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.763977 4756 generic.go:334] "Generic (PLEG): container finished" podID="eed122c3-f6af-488a-9684-f60ffe104434" containerID="2c21c56307aa57c6e443391243b07699b843b3fabf15be8a802984834452c537" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.764037 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerDied","Data":"2c21c56307aa57c6e443391243b07699b843b3fabf15be8a802984834452c537"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.784189 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancef07e-account-delete-lzl2g" event={"ID":"e3956c8b-1699-4cc4-8a97-b3ed35730374","Type":"ContainerStarted","Data":"827c7b2f7f156fcdf862e0095d48d06cec352e6cceced661c8524bfb39b9f3ea"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.816059 4756 generic.go:334] "Generic (PLEG): container finished" podID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerID="b64e18bd6132f9cc117785155229a1d2aa54ec5983a3ee45e5e205868af69f65" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.816144 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerDied","Data":"b64e18bd6132f9cc117785155229a1d2aa54ec5983a3ee45e5e205868af69f65"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.819527 4756 generic.go:334] "Generic (PLEG): container finished" podID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" containerID="3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c" exitCode=137 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.819602 4756 scope.go:117] "RemoveContainer" containerID="3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.819781 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.826680 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.826766 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.826794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.831594 4756 generic.go:334] "Generic (PLEG): container finished" podID="3ad8997f-b736-41c8-b087-d09b6316065d" containerID="51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.831644 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" event={"ID":"3ad8997f-b736-41c8-b087-d09b6316065d","Type":"ContainerDied","Data":"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.831665 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" event={"ID":"3ad8997f-b736-41c8-b087-d09b6316065d","Type":"ContainerDied","Data":"5f40a8e4c92dc45ec9c7183da68ffaa56203051c2de942ed08935bd6e16423b1"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.831727 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-ljgq7" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.833412 4756 generic.go:334] "Generic (PLEG): container finished" podID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerID="2ab71e28b3fab8dbf1f220c5eb4490f4cf686ee22cd811d509ce95839b4580b6" exitCode=0 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.833461 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerDied","Data":"2ab71e28b3fab8dbf1f220c5eb4490f4cf686ee22cd811d509ce95839b4580b6"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.834563 4756 generic.go:334] "Generic (PLEG): container finished" podID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerID="5f2536bd80f2c6c079d1749352246b7217d86ded3227c9601a2ad2db0771bef2" exitCode=143 Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.834600 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerDied","Data":"5f2536bd80f2c6c079d1749352246b7217d86ded3227c9601a2ad2db0771bef2"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.835303 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic915-account-delete-sw2ck" event={"ID":"13a44baa-047c-44d4-958e-c0d219d4f5a6","Type":"ContainerStarted","Data":"3f96b74293e71014bacb4cbda699cf6a26e9e135a464e74b94ba9f1ecd7cb05c"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.836792 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tnm2n_9e9466ec-a908-431a-9c47-fda720095cd8/openstack-network-exporter/0.log" Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.836833 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tnm2n" event={"ID":"9e9466ec-a908-431a-9c47-fda720095cd8","Type":"ContainerDied","Data":"168a227c3f7e1651059e907990bc1c808a02888d89b0330c43178f26f6d733e9"} Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.836914 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tnm2n"
Sep 30 19:53:42 crc kubenswrapper[4756]: I0930 19:53:42.913130 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="rabbitmq" containerID="cri-o://22acee4e4b450fbce8e6aa4569d2260f650fed76f5340a9472eecc6bc744543f" gracePeriod=604800
Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.067180 4756 log.go:32] "ExecSync cmd from runtime service failed" err=<
Sep 30 19:53:43 crc kubenswrapper[4756]: rpc error: code = Unknown desc = command error: setns `mnt`: Bad file descriptor
Sep 30 19:53:43 crc kubenswrapper[4756]: fail startup
Sep 30 19:53:43 crc kubenswrapper[4756]: , stdout: , stderr: , exit code -1
Sep 30 19:53:43 crc kubenswrapper[4756]: > containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.083571 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.084568 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.084640 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server"
Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.119717 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" (UID: "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.129412 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "dns-swift-storage-0".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.136091 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a2ea5dd-0745-414b-8ba3-283132a38f5b" path="/var/lib/kubelet/pods/1a2ea5dd-0745-414b-8ba3-283132a38f5b/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.152485 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39016fe2-5f7f-45fe-8f6e-4bce3684c667" path="/var/lib/kubelet/pods/39016fe2-5f7f-45fe-8f6e-4bce3684c667/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.153887 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="427631be-e1d9-46d5-8435-187eddd507c5" path="/var/lib/kubelet/pods/427631be-e1d9-46d5-8435-187eddd507c5/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.154692 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4701f9b9-b166-4254-9cd3-73625df60ce0" path="/var/lib/kubelet/pods/4701f9b9-b166-4254-9cd3-73625df60ce0/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.155715 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c0f048-1f35-4c66-81ff-500f4b11e32c" path="/var/lib/kubelet/pods/62c0f048-1f35-4c66-81ff-500f4b11e32c/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.156413 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d940278-5ba3-4ce6-996a-e4797a22e0d3" path="/var/lib/kubelet/pods/7d940278-5ba3-4ce6-996a-e4797a22e0d3/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.157829 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" (UID: "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.158435 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" path="/var/lib/kubelet/pods/87e13633-3439-4184-a1f8-1afbbe54883a/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.159978 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="988b9bea-09e1-48d3-9f07-5b7a8099f23f" path="/var/lib/kubelet/pods/988b9bea-09e1-48d3-9f07-5b7a8099f23f/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.160659 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a0dfe5d-82f1-43c6-ad07-91da00e313d1" path="/var/lib/kubelet/pods/9a0dfe5d-82f1-43c6-ad07-91da00e313d1/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.161302 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aa77118-06fc-4e3b-be73-b1cb6e95ae70" path="/var/lib/kubelet/pods/9aa77118-06fc-4e3b-be73-b1cb6e95ae70/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.162220 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a84a6bd0-737d-4dc9-a94f-f6e449c51bd1" path="/var/lib/kubelet/pods/a84a6bd0-737d-4dc9-a94f-f6e449c51bd1/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.173597 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.174136 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a87a5a44-c4dc-4c43-9d51-f41f5d257b24" path="/var/lib/kubelet/pods/a87a5a44-c4dc-4c43-9d51-f41f5d257b24/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.174849 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbdacb5e-ec79-4c63-b745-cae6af7434d6" path="/var/lib/kubelet/pods/bbdacb5e-ec79-4c63-b745-cae6af7434d6/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.175764 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be948507-c9db-4853-be77-02918a4ad42c" path="/var/lib/kubelet/pods/be948507-c9db-4853-be77-02918a4ad42c/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.177738 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config" (OuterVolumeSpecName: "config") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.177856 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" path="/var/lib/kubelet/pods/bfe78e50-0ef5-42f5-b989-39beebdd0d7c/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.179882 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caf2d23d-add8-47d3-b675-97cdffa9680f" path="/var/lib/kubelet/pods/caf2d23d-add8-47d3-b675-97cdffa9680f/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.180318 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdc6b8bd-8038-4943-8861-4b5898f5940a" path="/var/lib/kubelet/pods/cdc6b8bd-8038-4943-8861-4b5898f5940a/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.180836 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc949160-e62c-4982-bb20-8f3ebe1a2692" path="/var/lib/kubelet/pods/dc949160-e62c-4982-bb20-8f3ebe1a2692/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.183581 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec7d0c64-c002-496a-b7ef-727f7d4b4458" path="/var/lib/kubelet/pods/ec7d0c64-c002-496a-b7ef-727f7d4b4458/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.186367 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed2e720d-f813-47b0-a459-9a2b77737ebc" path="/var/lib/kubelet/pods/ed2e720d-f813-47b0-a459-9a2b77737ebc/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.186842 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87" path="/var/lib/kubelet/pods/ed8f7aa8-9e70-4c1f-9ea8-187cd6900d87/volumes" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.197669 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208379 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208614 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208628 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208639 4756 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208668 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.208681 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.253952 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.260251 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" (UID: "3b333bad-e0f7-4b3a-b21d-7854ad7aafa6"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.278977 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3ad8997f-b736-41c8-b087-d09b6316065d" (UID: "3ad8997f-b736-41c8-b087-d09b6316065d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.299454 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.178:8776/healthcheck\": read tcp 10.217.0.2:50274->10.217.0.178:8776: read: connection reset by peer" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.310041 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.310075 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.310086 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ad8997f-b736-41c8-b087-d09b6316065d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.310152 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.310203 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data podName:c3b1ef6d-6617-4d15-8709-a7623e75faa4 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:47.31018632 +0000 UTC m=+1356.931119797 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data") pod "rabbitmq-server-0" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4") : configmap "rabbitmq-config-data" not found Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.355364 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9e9466ec-a908-431a-9c47-fda720095cd8" (UID: "9e9466ec-a908-431a-9c47-fda720095cd8"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.411994 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9466ec-a908-431a-9c47-fda720095cd8-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.860111 4756 generic.go:334] "Generic (PLEG): container finished" podID="e3956c8b-1699-4cc4-8a97-b3ed35730374" containerID="44335673e361aea98b156cef861ddf8d96faef038113cc87e22500730d644162" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.876788 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.919565 4756 generic.go:334] "Generic (PLEG): container finished" podID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerID="c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.922080 4756 generic.go:334] "Generic (PLEG): container finished" podID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerID="6a012f6649739f7a3c55f7eb5ccd4842f6f7de987bfd7b459d8bead1ed555778" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.923376 4756 generic.go:334] "Generic (PLEG): container finished" podID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" containerID="10c6b1bb1f7f4de1bc5c4aca7ca3dd73b0eea0fe2e2a9e20084f2def6aa17018" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.925767 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:43 crc kubenswrapper[4756]: E0930 19:53:43.925813 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data podName:3bacc7b1-00f4-4489-9c0b-fa038cc443d6 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:47.925799655 +0000 UTC m=+1357.546733132 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data") pod "rabbitmq-cell1-server-0" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.938997 4756 generic.go:334] "Generic (PLEG): container finished" podID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerID="8d013d9f6a59da2bcacdb9355cb603a5a104cc57783d02a5fc8770f2294bb8f0" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.941738 4756 generic.go:334] "Generic (PLEG): container finished" podID="164b639f-c027-4486-a298-06e9510e14ac" containerID="1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.959412 4756 generic.go:334] "Generic (PLEG): container finished" podID="13a44baa-047c-44d4-958e-c0d219d4f5a6" containerID="aa828a69651e69602fa03f311a4beeda7cff828299fab3940dde2d62bb4fb1c5" exitCode=0 Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971306 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancef07e-account-delete-lzl2g" event={"ID":"e3956c8b-1699-4cc4-8a97-b3ed35730374","Type":"ContainerDied","Data":"44335673e361aea98b156cef861ddf8d96faef038113cc87e22500730d644162"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971347 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerDied","Data":"223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971361 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementa4e2-account-delete-86w5v" event={"ID":"859e275e-1d03-482b-aff6-8df9d80957fe","Type":"ContainerDied","Data":"8d8722ed40b905000d9d217044faffa3c95adddef11627a27463d44076ca7163"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971375 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d8722ed40b905000d9d217044faffa3c95adddef11627a27463d44076ca7163" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971385 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7cba2998-4911-447e-8fa8-782a83f0b3be","Type":"ContainerDied","Data":"679931f313811a8019f531141ea2974a1523f54138f776e501dd4cc16431520e"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971408 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="679931f313811a8019f531141ea2974a1523f54138f776e501dd4cc16431520e" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971416 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerDied","Data":"c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971428 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05ea7072-b87a-4cf4-8f86-f32e5836951a","Type":"ContainerDied","Data":"0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971435 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0913e99d26eda1e4278aa3d309581833c1126df98769b7a7d895021c8366e10e" 
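[Editor's note] The nestedpendingoperations entry above ("No retries permitted until ... (durationBeforeRetry 4s)") shows the volume manager pacing MountVolume.SetUp retries with exponential backoff while the rabbitmq config maps are gone. A minimal Go sketch of that pacing, assuming the usual 500ms initial delay that doubles per consecutive failure up to a cap of roughly two minutes; the helper name is borrowed from the log field and this is illustrative, not the actual kubelet code:

package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry returns how long the operation executor waits after
// the given number of consecutive failures: 500ms, doubling each failure,
// capped (assumed here at two minutes).
func durationBeforeRetry(failures int) time.Duration {
	d := 500 * time.Millisecond
	for i := 1; i < failures; i++ {
		d *= 2
		if d > 2*time.Minute {
			return 2 * time.Minute
		}
	}
	return d
}

func main() {
	// Four consecutive failures land on the 4s visible in the entry above.
	fmt.Println(durationBeforeRetry(4)) // prints "4s"
}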
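[Editor's note] The exitCode values in the PLEG "container finished" entries follow the shell convention of 128 plus the terminating signal: exitCode=143 is SIGTERM (128+15, the graceful kill that kuberuntime_container.go initiates), exitCode=137 is SIGKILL (128+9, grace period exhausted), and exitCode=0 is a clean exit. A small illustrative mapping in Go:

package main

import (
	"fmt"
	"syscall"
)

// signalFromExitCode recovers the terminating signal from a container exit
// code using the 128+signal convention; 0 means the process exited cleanly.
func signalFromExitCode(code int) syscall.Signal {
	if code > 128 {
		return syscall.Signal(code - 128)
	}
	return 0
}

func main() {
	fmt.Println(signalFromExitCode(143)) // "terminated" (SIGTERM) on Linux
	fmt.Println(signalFromExitCode(137)) // "killed" (SIGKILL) on Linux
}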
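[Editor's note] The "cannot register an exec PID: container is stopping" errors earlier in this span are exec readiness probes that kubelet keeps driving through CRI ExecSync while the containers shut down, and the logged grace periods (gracePeriod=29 here, gracePeriod=604800 for rabbitmq) normally originate in the pod spec's terminationGracePeriodSeconds. A sketch of how such a probe and grace period are declared with the k8s.io/api/core/v1 types, assuming a recent API version (older releases named ProbeHandler just Handler); the script path is the one from the log, the numeric spec value is an assumption:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// ovsdbServerContainer sketches the relevant part of the ovsdb-server
// container spec implied by the log: an exec readiness probe that kubelet
// runs via CRI ExecSync, which starts failing once the container stops.
func ovsdbServerContainer() corev1.Container {
	return corev1.Container{
		Name: "ovsdb-server",
		ReadinessProbe: &corev1.Probe{
			ProbeHandler: corev1.ProbeHandler{
				Exec: &corev1.ExecAction{
					Command: []string{"/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"},
				},
			},
		},
	}
}

func main() {
	// Assumed spec value; kubelet may log an adjusted remaining grace
	// (29 in the "Killing container with a grace period" entry above).
	grace := int64(30)
	spec := corev1.PodSpec{
		TerminationGracePeriodSeconds: &grace,
		Containers:                    []corev1.Container{ovsdbServerContainer()},
	}
	fmt.Println(*spec.TerminationGracePeriodSeconds, spec.Containers[0].Name)
}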
Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971443 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerDied","Data":"6a012f6649739f7a3c55f7eb5ccd4842f6f7de987bfd7b459d8bead1ed555778"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971453 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd344ad-8ef7-4a5c-8b74-1af845398b3b","Type":"ContainerDied","Data":"4ddd58096914be50411a905f20f0137a78da0564094809dcc3b23b355fadfd18"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971462 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ddd58096914be50411a905f20f0137a78da0564094809dcc3b23b355fadfd18" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f3ed405-3d2e-4296-8ea5-7b426f2bd987","Type":"ContainerDied","Data":"10c6b1bb1f7f4de1bc5c4aca7ca3dd73b0eea0fe2e2a9e20084f2def6aa17018"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971481 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f3ed405-3d2e-4296-8ea5-7b426f2bd987","Type":"ContainerDied","Data":"9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971490 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9eea8b3b58082a50bbf72ebde514f3da218f03bcad9af6a78faeb3bc854d34f7" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971497 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0073f-account-delete-mnvpw" event={"ID":"7dff94a7-1c27-4324-8e94-5a095f79eb9a","Type":"ContainerDied","Data":"cd049f406cf7955336fe29a5a15537a2d77c78e871e9634bac31eb19b62d96c6"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971505 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd049f406cf7955336fe29a5a15537a2d77c78e871e9634bac31eb19b62d96c6" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971514 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerDied","Data":"8d013d9f6a59da2bcacdb9355cb603a5a104cc57783d02a5fc8770f2294bb8f0"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971523 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" event={"ID":"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d","Type":"ContainerDied","Data":"b4bc6478bbf289ef6ad9dcd6e7f00f20e768178dd143775b77f42f3b2af3bfcd"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971530 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4bc6478bbf289ef6ad9dcd6e7f00f20e768178dd143775b77f42f3b2af3bfcd" Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971537 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"164b639f-c027-4486-a298-06e9510e14ac","Type":"ContainerDied","Data":"1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497"} Sep 30 19:53:43 crc kubenswrapper[4756]: I0930 19:53:43.971547 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic915-account-delete-sw2ck" 
event={"ID":"13a44baa-047c-44d4-958e-c0d219d4f5a6","Type":"ContainerDied","Data":"aa828a69651e69602fa03f311a4beeda7cff828299fab3940dde2d62bb4fb1c5"} Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.029425 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.034726 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.037544 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-ljgq7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.037993 4756 scope.go:117] "RemoveContainer" containerID="3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c" Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.040694 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c\": container with ID starting with 3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c not found: ID does not exist" containerID="3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.040730 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c"} err="failed to get container status \"3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c\": rpc error: code = NotFound desc = could not find container \"3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c\": container with ID starting with 3ab87a4918f1e8e6e14e0abdb176a7153997521aac91d77dc59e7592393d999c not found: ID does not exist" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.040756 4756 scope.go:117] "RemoveContainer" containerID="1fda6d8389dddb1fad1f1c0dcec9808a71ad955037c692639b3d4fd84034ae3c" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.047116 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.072712 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.096640 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.097675 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.106572 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-tnm2n"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.127535 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.127655 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130338 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts\") pod \"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130409 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130479 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb8bd\" (UniqueName: \"kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd\") pod \"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130547 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130619 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130649 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom\") pod \"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130663 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle\") pod \"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130691 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130757 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130776 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data\") pod 
\"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130802 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id\") pod \"7cba2998-4911-447e-8fa8-782a83f0b3be\" (UID: \"7cba2998-4911-447e-8fa8-782a83f0b3be\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130818 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zctjc\" (UniqueName: \"kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130837 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130854 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.130875 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"05ea7072-b87a-4cf4-8f86-f32e5836951a\" (UID: \"05ea7072-b87a-4cf4-8f86-f32e5836951a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.131163 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.132346 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.132933 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.134318 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). 
InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.140275 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets" (OuterVolumeSpecName: "secrets") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.145758 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146032 4756 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146095 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-generated\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146169 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cba2998-4911-447e-8fa8-782a83f0b3be-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146226 4756 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146276 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-config-data-default\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.146328 4756 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05ea7072-b87a-4cf4-8f86-f32e5836951a-operator-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.148820 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.148890 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts" (OuterVolumeSpecName: "scripts") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.151874 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.152524 4756 scope.go:117] "RemoveContainer" containerID="51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.156971 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd" (OuterVolumeSpecName: "kube-api-access-zb8bd") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "kube-api-access-zb8bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.173431 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc" (OuterVolumeSpecName: "kube-api-access-zctjc") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "kube-api-access-zctjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.191433 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.217728 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247303 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247526 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247627 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247720 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247800 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.247887 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248005 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248082 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlcks\" (UniqueName: \"kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks\") pod \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248166 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm9sn\" (UniqueName: \"kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248270 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: 
\"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248373 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phfbs\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248516 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle\") pod \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248634 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248835 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs\") pod \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.248938 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.249047 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs\") pod \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.249256 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.253849 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.253889 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data\") pod \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\" (UID: \"6f3ed405-3d2e-4296-8ea5-7b426f2bd987\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.253914 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvm8z\" (UniqueName: \"kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z\") pod 
\"7dff94a7-1c27-4324-8e94-5a095f79eb9a\" (UID: \"7dff94a7-1c27-4324-8e94-5a095f79eb9a\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.253934 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.253983 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj44b\" (UniqueName: \"kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b\") pod \"859e275e-1d03-482b-aff6-8df9d80957fe\" (UID: \"859e275e-1d03-482b-aff6-8df9d80957fe\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254005 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom\") pod \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\" (UID: \"ebd344ad-8ef7-4a5c-8b74-1af845398b3b\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254025 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd\") pod \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\" (UID: \"0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254764 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zctjc\" (UniqueName: \"kubernetes.io/projected/05ea7072-b87a-4cf4-8f86-f32e5836951a-kube-api-access-zctjc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254790 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254801 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254811 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb8bd\" (UniqueName: \"kubernetes.io/projected/7cba2998-4911-447e-8fa8-782a83f0b3be-kube-api-access-zb8bd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254832 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.254840 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.250701 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.256267 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.257617 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.263654 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs" (OuterVolumeSpecName: "logs") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.265950 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn" (OuterVolumeSpecName: "kube-api-access-dm9sn") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "kube-api-access-dm9sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.274241 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z" (OuterVolumeSpecName: "kube-api-access-qvm8z") pod "7dff94a7-1c27-4324-8e94-5a095f79eb9a" (UID: "7dff94a7-1c27-4324-8e94-5a095f79eb9a"). InnerVolumeSpecName "kube-api-access-qvm8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.281929 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.281959 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b" (OuterVolumeSpecName: "kube-api-access-gj44b") pod "859e275e-1d03-482b-aff6-8df9d80957fe" (UID: "859e275e-1d03-482b-aff6-8df9d80957fe"). InnerVolumeSpecName "kube-api-access-gj44b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.285092 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.285217 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs" (OuterVolumeSpecName: "kube-api-access-phfbs") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "kube-api-access-phfbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.292789 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts" (OuterVolumeSpecName: "scripts") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.293815 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks" (OuterVolumeSpecName: "kube-api-access-wlcks") pod "6f3ed405-3d2e-4296-8ea5-7b426f2bd987" (UID: "6f3ed405-3d2e-4296-8ea5-7b426f2bd987"). InnerVolumeSpecName "kube-api-access-wlcks". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.297653 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.299291 4756 scope.go:117] "RemoveContainer" containerID="58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358063 4756 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358093 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358103 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358113 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlcks\" (UniqueName: \"kubernetes.io/projected/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-kube-api-access-wlcks\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358126 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm9sn\" (UniqueName: \"kubernetes.io/projected/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-kube-api-access-dm9sn\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358134 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phfbs\" (UniqueName: \"kubernetes.io/projected/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-kube-api-access-phfbs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358144 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358152 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358163 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvm8z\" (UniqueName: \"kubernetes.io/projected/7dff94a7-1c27-4324-8e94-5a095f79eb9a-kube-api-access-qvm8z\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358174 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj44b\" (UniqueName: \"kubernetes.io/projected/859e275e-1d03-482b-aff6-8df9d80957fe-kube-api-access-gj44b\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358182 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.358191 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.371941 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kfdjm" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller" probeResult="failure" output="" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.382791 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.383704 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497 is running failed: container process not found" containerID="1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.384082 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497 is running failed: container process not found" containerID="1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.384548 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497 is running failed: container process not found" containerID="1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.384576 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="164b639f-c027-4486-a298-06e9510e14ac" containerName="nova-cell1-conductor-conductor" Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.386977 4756 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Sep 30 19:53:44 crc kubenswrapper[4756]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-09-30T19:53:42Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 30 19:53:44 crc kubenswrapper[4756]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Sep 30 19:53:44 crc kubenswrapper[4756]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-kfdjm" message=< Sep 30 19:53:44 crc kubenswrapper[4756]: Exiting ovn-controller (1) [FAILED] Sep 30 19:53:44 crc kubenswrapper[4756]: Killing ovn-controller (1) [ OK ] Sep 30 19:53:44 crc kubenswrapper[4756]: 2025-09-30T19:53:42Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 30 19:53:44 crc kubenswrapper[4756]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Sep 30 19:53:44 crc kubenswrapper[4756]: > Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.387063 4756 kuberuntime_container.go:691] "PreStop hook failed" err=< Sep 30 19:53:44 crc kubenswrapper[4756]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-09-30T19:53:42Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 30 19:53:44 crc kubenswrapper[4756]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Sep 30 19:53:44 crc kubenswrapper[4756]: > pod="openstack/ovn-controller-kfdjm" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller" 
containerID="cri-o://b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.387107 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-kfdjm" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller" containerID="cri-o://b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11" gracePeriod=27 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.407551 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.409829 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-central-agent" containerID="cri-o://22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.410339 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="proxy-httpd" containerID="cri-o://8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.410404 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="sg-core" containerID="cri-o://8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.410453 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-notification-agent" containerID="cri-o://db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.442141 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.448656 4756 scope.go:117] "RemoveContainer" containerID="51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba" Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.449655 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba\": container with ID starting with 51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba not found: ID does not exist" containerID="51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.449748 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba"} err="failed to get container status \"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba\": rpc error: code = NotFound desc = could not find container \"51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba\": container with ID starting with 51ef4e173c225032f765e5047d88bbd26bb0ef46029f5e6ee965a80da5a74dba not found: ID does not exist" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.449844 4756 scope.go:117] "RemoveContainer" 
containerID="58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b" Sep 30 19:53:44 crc kubenswrapper[4756]: E0930 19:53:44.450101 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b\": container with ID starting with 58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b not found: ID does not exist" containerID="58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.450200 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b"} err="failed to get container status \"58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b\": rpc error: code = NotFound desc = could not find container \"58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b\": container with ID starting with 58b5b5ec91d10a67d194b5ff9f7a783dc05b8dd915bde4ee562175c5aaaf173b not found: ID does not exist" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.450261 4756 scope.go:117] "RemoveContainer" containerID="a689bb821757f51e26e8f98c83247791b78940eeab2a2b87776e95ccb89ff5af" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.458162 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" containerName="kube-state-metrics" containerID="cri-o://1de9577ca6bdb23f4bd45cbc37f2aa9d5e6f8b133c3ff43908637b2ae485af44" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.459136 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqx8v\" (UniqueName: \"kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v\") pod \"e3956c8b-1699-4cc4-8a97-b3ed35730374\" (UID: \"e3956c8b-1699-4cc4-8a97-b3ed35730374\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.459687 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.468269 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f3ed405-3d2e-4296-8ea5-7b426f2bd987" (UID: "6f3ed405-3d2e-4296-8ea5-7b426f2bd987"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.476683 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v" (OuterVolumeSpecName: "kube-api-access-bqx8v") pod "e3956c8b-1699-4cc4-8a97-b3ed35730374" (UID: "e3956c8b-1699-4cc4-8a97-b3ed35730374"). InnerVolumeSpecName "kube-api-access-bqx8v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.525623 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:35974->10.217.0.205:8775: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.525937 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:35984->10.217.0.205:8775: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.552528 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.552717 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" containerName="memcached" containerID="cri-o://4743fe1d3afca0374e30323f82a3b45ead65b0e37c7b34d43281655e0227ef30" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.559076 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.585813 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-6bfdd55b6d-fqgk9" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.144:8778/\": read tcp 10.217.0.2:35952->10.217.0.144:8778: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.585899 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-6bfdd55b6d-fqgk9" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.144:8778/\": read tcp 10.217.0.2:35946->10.217.0.144:8778: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.596079 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.606503 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-jsmf7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.611376 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.611422 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.611432 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.611441 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqx8v\" (UniqueName: \"kubernetes.io/projected/e3956c8b-1699-4cc4-8a97-b3ed35730374-kube-api-access-bqx8v\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.611460 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dslgq"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.615133 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-jsmf7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.622749 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dslgq"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.628623 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.628828 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-6889fc7587-jlhbj" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerName="keystone-api" containerID="cri-o://941c1f888df56ddf960e94e48f17bb2adec0251bc3c8d8f1dd8e4c62d928796e" gracePeriod=30 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.654325 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.670954 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data" (OuterVolumeSpecName: "config-data") pod "6f3ed405-3d2e-4296-8ea5-7b426f2bd987" (UID: "6f3ed405-3d2e-4296-8ea5-7b426f2bd987"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.671131 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "05ea7072-b87a-4cf4-8f86-f32e5836951a" (UID: "05ea7072-b87a-4cf4-8f86-f32e5836951a"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.710237 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-27gdt"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.721582 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.721613 4756 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05ea7072-b87a-4cf4-8f86-f32e5836951a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.736984 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.745695 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.746622 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-27gdt"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.762861 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b613-account-create-zjxl7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.788810 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b613-account-create-zjxl7"] Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.800742 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.818248 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.823624 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.823653 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.831692 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "6f3ed405-3d2e-4296-8ea5-7b426f2bd987" (UID: "6f3ed405-3d2e-4296-8ea5-7b426f2bd987"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.864659 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.174:9292/healthcheck\": read tcp 10.217.0.2:59836->10.217.0.174:9292: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.865816 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.174:9292/healthcheck\": read tcp 10.217.0.2:59826->10.217.0.174:9292: read: connection reset by peer" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.874722 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.897095 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.908768 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data" (OuterVolumeSpecName: "config-data") pod "7cba2998-4911-447e-8fa8-782a83f0b3be" (UID: "7cba2998-4911-447e-8fa8-782a83f0b3be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.919463 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data" (OuterVolumeSpecName: "config-data") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.923855 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ebd344ad-8ef7-4a5c-8b74-1af845398b3b" (UID: "ebd344ad-8ef7-4a5c-8b74-1af845398b3b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.925616 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle\") pod \"164b639f-c027-4486-a298-06e9510e14ac\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.926588 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data\") pod \"164b639f-c027-4486-a298-06e9510e14ac\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.928051 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5skg7\" (UniqueName: \"kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7\") pod \"13a44baa-047c-44d4-958e-c0d219d4f5a6\" (UID: \"13a44baa-047c-44d4-958e-c0d219d4f5a6\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.928165 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s65d9\" (UniqueName: \"kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9\") pod \"164b639f-c027-4486-a298-06e9510e14ac\" (UID: \"164b639f-c027-4486-a298-06e9510e14ac\") " Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.928735 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cba2998-4911-447e-8fa8-782a83f0b3be-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.929218 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.929469 4756 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.929568 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.929637 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.929697 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd344ad-8ef7-4a5c-8b74-1af845398b3b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.934465 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data" (OuterVolumeSpecName: "config-data") pod "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" (UID: "0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.937825 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7" (OuterVolumeSpecName: "kube-api-access-5skg7") pod "13a44baa-047c-44d4-958e-c0d219d4f5a6" (UID: "13a44baa-047c-44d4-958e-c0d219d4f5a6"). InnerVolumeSpecName "kube-api-access-5skg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.945820 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9" (OuterVolumeSpecName: "kube-api-access-s65d9") pod "164b639f-c027-4486-a298-06e9510e14ac" (UID: "164b639f-c027-4486-a298-06e9510e14ac"). InnerVolumeSpecName "kube-api-access-s65d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.972489 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "164b639f-c027-4486-a298-06e9510e14ac" (UID: "164b639f-c027-4486-a298-06e9510e14ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.980526 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data" (OuterVolumeSpecName: "config-data") pod "164b639f-c027-4486-a298-06e9510e14ac" (UID: "164b639f-c027-4486-a298-06e9510e14ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.996995 4756 generic.go:334] "Generic (PLEG): container finished" podID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerID="a96ca6143ea765730fa414e7b169828518681d4fa546277df96764fd20203c32" exitCode=0 Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.997006 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "6f3ed405-3d2e-4296-8ea5-7b426f2bd987" (UID: "6f3ed405-3d2e-4296-8ea5-7b426f2bd987"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:44 crc kubenswrapper[4756]: I0930 19:53:44.997053 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerDied","Data":"a96ca6143ea765730fa414e7b169828518681d4fa546277df96764fd20203c32"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.006842 4756 generic.go:334] "Generic (PLEG): container finished" podID="78518120-da64-4b55-9dce-b7412b16bed3" containerID="8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f" exitCode=0 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.006869 4756 generic.go:334] "Generic (PLEG): container finished" podID="78518120-da64-4b55-9dce-b7412b16bed3" containerID="8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9" exitCode=2 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.006913 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerDied","Data":"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.006937 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerDied","Data":"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.009644 4756 generic.go:334] "Generic (PLEG): container finished" podID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerID="493c10b1ea169f530bd9a7626736c31aefe74b4181647d0bdc31bf4a5ec5cc4f" exitCode=0 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.009693 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerDied","Data":"493c10b1ea169f530bd9a7626736c31aefe74b4181647d0bdc31bf4a5ec5cc4f"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.012246 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.012716 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"164b639f-c027-4486-a298-06e9510e14ac","Type":"ContainerDied","Data":"726f5baf0d60f8fe7ae861fd6090b19269009f3509e3a690a1594c710c9cc203"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.012763 4756 scope.go:117] "RemoveContainer" containerID="1eade56fcdb4689bb071f3b44ecd695f64f66b527074f30c140f41aff392a497" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.027711 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancef07e-account-delete-lzl2g" event={"ID":"e3956c8b-1699-4cc4-8a97-b3ed35730374","Type":"ContainerDied","Data":"827c7b2f7f156fcdf862e0095d48d06cec352e6cceced661c8524bfb39b9f3ea"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.027834 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glancef07e-account-delete-lzl2g" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031007 4756 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f3ed405-3d2e-4296-8ea5-7b426f2bd987-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031029 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031038 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/164b639f-c027-4486-a298-06e9510e14ac-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031046 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5skg7\" (UniqueName: \"kubernetes.io/projected/13a44baa-047c-44d4-958e-c0d219d4f5a6-kube-api-access-5skg7\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031055 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s65d9\" (UniqueName: \"kubernetes.io/projected/164b639f-c027-4486-a298-06e9510e14ac-kube-api-access-s65d9\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.031063 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.051700 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kfdjm_9996bc15-3d12-4aa1-a22e-d11726f6c4d8/ovn-controller/0.log" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.051753 4756 generic.go:334] "Generic (PLEG): container finished" podID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerID="b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11" exitCode=143 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.051826 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm" event={"ID":"9996bc15-3d12-4aa1-a22e-d11726f6c4d8","Type":"ContainerDied","Data":"b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.054831 4756 generic.go:334] "Generic (PLEG): container finished" podID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" containerID="1de9577ca6bdb23f4bd45cbc37f2aa9d5e6f8b133c3ff43908637b2ae485af44" exitCode=2 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.054900 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e92dcf48-58c2-4c3e-907f-7fd43e23a424","Type":"ContainerDied","Data":"1de9577ca6bdb23f4bd45cbc37f2aa9d5e6f8b133c3ff43908637b2ae485af44"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.057239 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic915-account-delete-sw2ck" event={"ID":"13a44baa-047c-44d4-958e-c0d219d4f5a6","Type":"ContainerDied","Data":"3f96b74293e71014bacb4cbda699cf6a26e9e135a464e74b94ba9f1ecd7cb05c"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.057305 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapic915-account-delete-sw2ck" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.062287 4756 generic.go:334] "Generic (PLEG): container finished" podID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerID="0a44e61d030a7208b33d6a4a2edac3b0fbac211badfde73f8a2bb902741ee1c5" exitCode=0 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.062372 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.064923 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.065570 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f8d8945f5-z5bmn" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.065787 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerDied","Data":"0a44e61d030a7208b33d6a4a2edac3b0fbac211badfde73f8a2bb902741ee1c5"} Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.066003 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementa4e2-account-delete-86w5v" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.066074 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.066180 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0073f-account-delete-mnvpw" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.066435 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.115029 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerName="galera" containerID="cri-o://366cf47beb3bf314873bbe2aee65a48d7de8c63bb8eb30ed71c2c3e6ba1b8b24" gracePeriod=30 Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.126391 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c7b7f8-1c4a-4e3b-907b-662dc0c798aa" path="/var/lib/kubelet/pods/07c7b7f8-1c4a-4e3b-907b-662dc0c798aa/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.127764 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" path="/var/lib/kubelet/pods/3ad8997f-b736-41c8-b087-d09b6316065d/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.128585 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b333bad-e0f7-4b3a-b21d-7854ad7aafa6" path="/var/lib/kubelet/pods/3b333bad-e0f7-4b3a-b21d-7854ad7aafa6/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.130178 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c4f7feb-d6b0-4e8e-ba81-008f5796220b" path="/var/lib/kubelet/pods/7c4f7feb-d6b0-4e8e-ba81-008f5796220b/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.131751 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e9466ec-a908-431a-9c47-fda720095cd8" path="/var/lib/kubelet/pods/9e9466ec-a908-431a-9c47-fda720095cd8/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.132638 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="babfbb3a-b8d1-419a-92d9-37c5390467f6" path="/var/lib/kubelet/pods/babfbb3a-b8d1-419a-92d9-37c5390467f6/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.133221 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3bb8b1f-f343-4c14-a5da-70dc960397db" path="/var/lib/kubelet/pods/f3bb8b1f-f343-4c14-a5da-70dc960397db/volumes" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.505552 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kfdjm_9996bc15-3d12-4aa1-a22e-d11726f6c4d8/ovn-controller/0.log" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.505614 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.519036 4756 scope.go:117] "RemoveContainer" containerID="44335673e361aea98b156cef861ddf8d96faef038113cc87e22500730d644162" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.572880 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.607867 4756 scope.go:117] "RemoveContainer" containerID="aa828a69651e69602fa03f311a4beeda7cff828299fab3940dde2d62bb4fb1c5" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643016 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl74v\" (UniqueName: \"kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643074 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643096 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643198 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643226 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643264 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643297 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle\") pod \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\" (UID: \"9996bc15-3d12-4aa1-a22e-d11726f6c4d8\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643515 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run" (OuterVolumeSpecName: "var-run") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.643906 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.644162 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.644485 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.645469 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts" (OuterVolumeSpecName: "scripts") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.650265 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v" (OuterVolumeSpecName: "kube-api-access-wl74v") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "kube-api-access-wl74v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.660617 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.672786 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.710607 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744564 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs\") pod \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744644 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln69m\" (UniqueName: \"kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m\") pod \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744684 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle\") pod \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744758 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data\") pod \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744774 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config\") pod \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744797 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs\") pod \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744851 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle\") pod \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\" (UID: \"ce0daf51-f01f-4ace-bf9d-3dd2e2023618\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744901 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs\") pod \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.744929 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbdxt\" (UniqueName: \"kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt\") pod \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\" (UID: \"e92dcf48-58c2-4c3e-907f-7fd43e23a424\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.754483 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl74v\" (UniqueName: 
\"kubernetes.io/projected/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-kube-api-access-wl74v\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.754517 4756 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.754526 4756 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.754535 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.754547 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.772267 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs" (OuterVolumeSpecName: "logs") pod "ce0daf51-f01f-4ace-bf9d-3dd2e2023618" (UID: "ce0daf51-f01f-4ace-bf9d-3dd2e2023618"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.778965 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m" (OuterVolumeSpecName: "kube-api-access-ln69m") pod "ce0daf51-f01f-4ace-bf9d-3dd2e2023618" (UID: "ce0daf51-f01f-4ace-bf9d-3dd2e2023618"). InnerVolumeSpecName "kube-api-access-ln69m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.782899 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt" (OuterVolumeSpecName: "kube-api-access-sbdxt") pod "e92dcf48-58c2-4c3e-907f-7fd43e23a424" (UID: "e92dcf48-58c2-4c3e-907f-7fd43e23a424"). InnerVolumeSpecName "kube-api-access-sbdxt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.855382 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-684ccb4bd6-x97br" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.165:9311/healthcheck\": dial tcp 10.217.0.165:9311: connect: connection refused" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857362 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857421 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857456 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857524 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k25s9\" (UniqueName: \"kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857554 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857640 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.857699 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data\") pod \"28dfe26f-4d95-4def-9500-a0410c260cf2\" (UID: \"28dfe26f-4d95-4def-9500-a0410c260cf2\") " Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.858373 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbdxt\" (UniqueName: \"kubernetes.io/projected/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-api-access-sbdxt\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.858386 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.876092 4756 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-ln69m\" (UniqueName: \"kubernetes.io/projected/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-kube-api-access-ln69m\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.874884 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce0daf51-f01f-4ace-bf9d-3dd2e2023618" (UID: "ce0daf51-f01f-4ace-bf9d-3dd2e2023618"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.865593 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-684ccb4bd6-x97br" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.165:9311/healthcheck\": dial tcp 10.217.0.165:9311: connect: connection refused" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.877458 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "e92dcf48-58c2-4c3e-907f-7fd43e23a424" (UID: "e92dcf48-58c2-4c3e-907f-7fd43e23a424"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.880907 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs" (OuterVolumeSpecName: "logs") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.883840 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data" (OuterVolumeSpecName: "config-data") pod "ce0daf51-f01f-4ace-bf9d-3dd2e2023618" (UID: "ce0daf51-f01f-4ace-bf9d-3dd2e2023618"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.907342 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9" (OuterVolumeSpecName: "kube-api-access-k25s9") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "kube-api-access-k25s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.912621 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts" (OuterVolumeSpecName: "scripts") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.919573 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "e92dcf48-58c2-4c3e-907f-7fd43e23a424" (UID: "e92dcf48-58c2-4c3e-907f-7fd43e23a424"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.949383 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data" (OuterVolumeSpecName: "config-data") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.957480 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "9996bc15-3d12-4aa1-a22e-d11726f6c4d8" (UID: "9996bc15-3d12-4aa1-a22e-d11726f6c4d8"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.967481 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985045 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985079 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985091 4756 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985102 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985112 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k25s9\" (UniqueName: \"kubernetes.io/projected/28dfe26f-4d95-4def-9500-a0410c260cf2-kube-api-access-k25s9\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985120 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28dfe26f-4d95-4def-9500-a0410c260cf2-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985128 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985136 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985144 4756 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:45 crc kubenswrapper[4756]: I0930 19:53:45.985152 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9996bc15-3d12-4aa1-a22e-d11726f6c4d8-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.006943 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e92dcf48-58c2-4c3e-907f-7fd43e23a424" (UID: "e92dcf48-58c2-4c3e-907f-7fd43e23a424"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.016985 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.063805 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ce0daf51-f01f-4ace-bf9d-3dd2e2023618" (UID: "ce0daf51-f01f-4ace-bf9d-3dd2e2023618"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.081113 4756 generic.go:334] "Generic (PLEG): container finished" podID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerID="af672176e9d6745734bae103367eaab92c17b4c3f23ec9cc9c702908a2e393cc" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.081201 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerDied","Data":"af672176e9d6745734bae103367eaab92c17b4c3f23ec9cc9c702908a2e393cc"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.081250 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"207e631b-3ef5-430f-bad1-4a9972dc8eb4","Type":"ContainerDied","Data":"86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.081262 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86f0982dddf30b4ac618648f508315697aef35ef0776de68567129b452543324" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.083179 4756 generic.go:334] "Generic (PLEG): container finished" podID="eed122c3-f6af-488a-9684-f60ffe104434" containerID="e10cbb9a45161cb9ac5e6eb2582b06fee216894c3414ab0191425a8e6235e510" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.083227 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerDied","Data":"e10cbb9a45161cb9ac5e6eb2582b06fee216894c3414ab0191425a8e6235e510"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.083243 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eed122c3-f6af-488a-9684-f60ffe104434","Type":"ContainerDied","Data":"0318ec90fdae1de3844c876368978de3d95b8f151ccc9cc6ea5b08c95f8d4641"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.083253 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0318ec90fdae1de3844c876368978de3d95b8f151ccc9cc6ea5b08c95f8d4641" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.085272 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kfdjm_9996bc15-3d12-4aa1-a22e-d11726f6c4d8/ovn-controller/0.log" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.085369 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kfdjm" 
event={"ID":"9996bc15-3d12-4aa1-a22e-d11726f6c4d8","Type":"ContainerDied","Data":"e9ee3bd8f7d75ab35b946607378c7854f04c393b1d7b5eca7978c162d02e40c6"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.085437 4756 scope.go:117] "RemoveContainer" containerID="b49cb0b0ecf6e81c4dcede589eabe05df7545b0c049a2725e9fa0033649ddb11" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.085457 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kfdjm" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.086572 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.086591 4756 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce0daf51-f01f-4ace-bf9d-3dd2e2023618-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.086601 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92dcf48-58c2-4c3e-907f-7fd43e23a424-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.087565 4756 generic.go:334] "Generic (PLEG): container finished" podID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerID="207904952f6540ef119af443514c1298c2cf8341c4b2adc7b85d185c51cdf006" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.087644 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerDied","Data":"207904952f6540ef119af443514c1298c2cf8341c4b2adc7b85d185c51cdf006"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.090864 4756 generic.go:334] "Generic (PLEG): container finished" podID="78518120-da64-4b55-9dce-b7412b16bed3" containerID="22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.090913 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerDied","Data":"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.093081 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6bfdd55b6d-fqgk9" event={"ID":"28dfe26f-4d95-4def-9500-a0410c260cf2","Type":"ContainerDied","Data":"7faa56d1345f4b38e275d73a64f337502b9569aef157ee1fe2beefc111a2401b"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.093182 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6bfdd55b6d-fqgk9" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.103495 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "28dfe26f-4d95-4def-9500-a0410c260cf2" (UID: "28dfe26f-4d95-4def-9500-a0410c260cf2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.111618 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0befea8f-1ced-4703-b2a2-a10e154271f4","Type":"ContainerDied","Data":"ae5a3b95769c08291481e06f22b1517c9339cff6ebc043bb2f8fd1964148fa8f"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.111665 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae5a3b95769c08291481e06f22b1517c9339cff6ebc043bb2f8fd1964148fa8f" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.114174 4756 generic.go:334] "Generic (PLEG): container finished" podID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" containerID="4743fe1d3afca0374e30323f82a3b45ead65b0e37c7b34d43281655e0227ef30" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.114227 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5d8f3245-22a4-45a4-bbc4-33e1a078f92b","Type":"ContainerDied","Data":"4743fe1d3afca0374e30323f82a3b45ead65b0e37c7b34d43281655e0227ef30"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.114250 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5d8f3245-22a4-45a4-bbc4-33e1a078f92b","Type":"ContainerDied","Data":"dcf469d5d377557749ef58864624ab85356b6cc6830ddb64c8af3f562e9d0ffe"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.114261 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcf469d5d377557749ef58864624ab85356b6cc6830ddb64c8af3f562e9d0ffe" Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.118229 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1 is running failed: container process not found" containerID="bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.119563 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1 is running failed: container process not found" containerID="bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.120650 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1 is running failed: container process not found" containerID="bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.120676 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerName="nova-scheduler-scheduler" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.121948 4756 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.121959 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e92dcf48-58c2-4c3e-907f-7fd43e23a424","Type":"ContainerDied","Data":"3b988ae109b01b5793feb6f3e4ac31c313feec15e8ddabe87bd972ae4c352c0f"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.126270 4756 generic.go:334] "Generic (PLEG): container finished" podID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" containerID="8dbe31d5b2222309a75d567a85830fd5e0fbe8a178b7a9f99d3211c5bbf82f1c" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.126435 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f9ab0288-ae1d-41f7-954d-8cbadc83c614","Type":"ContainerDied","Data":"8dbe31d5b2222309a75d567a85830fd5e0fbe8a178b7a9f99d3211c5bbf82f1c"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.128057 4756 generic.go:334] "Generic (PLEG): container finished" podID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerID="bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" exitCode=0 Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.128163 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cff112d3-3898-41c9-aab6-ed2e2a57a484","Type":"ContainerDied","Data":"bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.138563 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.138651 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce0daf51-f01f-4ace-bf9d-3dd2e2023618","Type":"ContainerDied","Data":"46ebb7c52a491c73ac8d6f8d7d6acb550544551c6bf2e2051405fb8444e43678"} Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.138726 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.151340 4756 scope.go:117] "RemoveContainer" containerID="0a44e61d030a7208b33d6a4a2edac3b0fbac211badfde73f8a2bb902741ee1c5" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.163005 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.181306 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.187998 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28dfe26f-4d95-4def-9500-a0410c260cf2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.188313 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.196244 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.204897 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.209952 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kfdjm"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.212735 4756 scope.go:117] "RemoveContainer" containerID="a02c15c325b692d99424e55c3116131ba5bc7283ac4bd35486df44c56fb3a3f2" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.216313 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.217325 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.223940 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placementa4e2-account-delete-86w5v"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.230038 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.236139 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.241367 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.247273 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapic915-account-delete-sw2ck"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.254152 4756 scope.go:117] "RemoveContainer" containerID="1de9577ca6bdb23f4bd45cbc37f2aa9d5e6f8b133c3ff43908637b2ae485af44" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.260964 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.267058 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0073f-account-delete-mnvpw"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.271929 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.276655 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.287725 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.288786 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data\") 
pod \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289787 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle\") pod \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289825 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m6rn\" (UniqueName: \"kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289847 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289885 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z7pf\" (UniqueName: \"kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289916 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289935 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289967 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.289985 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config\") pod \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290024 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290039 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: 
\"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290056 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290081 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290112 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290198 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data" (OuterVolumeSpecName: "config-data") pod "5d8f3245-22a4-45a4-bbc4-33e1a078f92b" (UID: "5d8f3245-22a4-45a4-bbc4-33e1a078f92b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290715 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5d8f3245-22a4-45a4-bbc4-33e1a078f92b" (UID: "5d8f3245-22a4-45a4-bbc4-33e1a078f92b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290895 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs" (OuterVolumeSpecName: "logs") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290928 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290952 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5wjf\" (UniqueName: \"kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.290994 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291013 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjhdm\" (UniqueName: \"kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm\") pod \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291093 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291135 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs\") pod \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\" (UID: \"5d8f3245-22a4-45a4-bbc4-33e1a078f92b\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291153 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291183 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs\") pod \"0befea8f-1ced-4703-b2a2-a10e154271f4\" (UID: \"0befea8f-1ced-4703-b2a2-a10e154271f4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291225 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: 
\"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291240 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle\") pod \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\" (UID: \"207e631b-3ef5-430f-bad1-4a9972dc8eb4\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291260 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.291421 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs\") pod \"eed122c3-f6af-488a-9684-f60ffe104434\" (UID: \"eed122c3-f6af-488a-9684-f60ffe104434\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.295444 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.295462 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.295471 4756 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.300548 4756 scope.go:117] "RemoveContainer" containerID="493c10b1ea169f530bd9a7626736c31aefe74b4181647d0bdc31bf4a5ec5cc4f" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.301000 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.300427 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs" (OuterVolumeSpecName: "logs") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.302506 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs" (OuterVolumeSpecName: "logs") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.302988 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.310006 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.310033 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm" (OuterVolumeSpecName: "kube-api-access-kjhdm") pod "5d8f3245-22a4-45a4-bbc4-33e1a078f92b" (UID: "5d8f3245-22a4-45a4-bbc4-33e1a078f92b"). InnerVolumeSpecName "kube-api-access-kjhdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.310055 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn" (OuterVolumeSpecName: "kube-api-access-6m6rn") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "kube-api-access-6m6rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.312902 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-5f8d8945f5-z5bmn"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.324123 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.327871 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glancef07e-account-delete-lzl2g"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.330642 4756 scope.go:117] "RemoveContainer" containerID="5f2536bd80f2c6c079d1749352246b7217d86ded3227c9601a2ad2db0771bef2" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.330673 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf" (OuterVolumeSpecName: "kube-api-access-8z7pf") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "kube-api-access-8z7pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.332044 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.332669 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf" (OuterVolumeSpecName: "kube-api-access-j5wjf") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "kube-api-access-j5wjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.339510 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.345166 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts" (OuterVolumeSpecName: "scripts") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.345588 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts" (OuterVolumeSpecName: "scripts") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.346814 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.347495 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.376078 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.384598 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.392182 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.392797 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d8f3245-22a4-45a4-bbc4-33e1a078f92b" (UID: "5d8f3245-22a4-45a4-bbc4-33e1a078f92b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397067 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m6rn\" (UniqueName: \"kubernetes.io/projected/0befea8f-1ced-4703-b2a2-a10e154271f4-kube-api-access-6m6rn\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397103 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397114 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z7pf\" (UniqueName: \"kubernetes.io/projected/207e631b-3ef5-430f-bad1-4a9972dc8eb4-kube-api-access-8z7pf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397123 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397135 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0befea8f-1ced-4703-b2a2-a10e154271f4-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397143 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397152 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397162 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5wjf\" (UniqueName: \"kubernetes.io/projected/eed122c3-f6af-488a-9684-f60ffe104434-kube-api-access-j5wjf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397170 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjhdm\" (UniqueName: \"kubernetes.io/projected/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-kube-api-access-kjhdm\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397178 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397191 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397200 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/207e631b-3ef5-430f-bad1-4a9972dc8eb4-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397208 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eed122c3-f6af-488a-9684-f60ffe104434-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.397218 4756 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.461714 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.463107 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.464034 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.465787 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6bfdd55b6d-fqgk9"] Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.471449 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.473738 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data" (OuterVolumeSpecName: "config-data") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.474508 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.480045 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.490961 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.490981 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.493277 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 19:53:46 crc kubenswrapper[4756]: E0930 19:53:46.493325 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.498493 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.498852 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.498926 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.498976 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.498997 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499036 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl47p\" (UniqueName: \"kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499072 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499672 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-public-tls-certs\") on node \"crc\" DevicePath 
\"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499690 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499699 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.499709 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.500852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs" (OuterVolumeSpecName: "logs") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.501421 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.502638 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data" (OuterVolumeSpecName: "config-data") pod "0befea8f-1ced-4703-b2a2-a10e154271f4" (UID: "0befea8f-1ced-4703-b2a2-a10e154271f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.503322 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.511342 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data" (OuterVolumeSpecName: "config-data") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.514331 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p" (OuterVolumeSpecName: "kube-api-access-xl47p") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "kube-api-access-xl47p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.528911 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "207e631b-3ef5-430f-bad1-4a9972dc8eb4" (UID: "207e631b-3ef5-430f-bad1-4a9972dc8eb4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.540755 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.544650 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.565044 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.574562 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "5d8f3245-22a4-45a4-bbc4-33e1a078f92b" (UID: "5d8f3245-22a4-45a4-bbc4-33e1a078f92b"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.594486 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.594572 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.594907 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "eed122c3-f6af-488a-9684-f60ffe104434" (UID: "eed122c3-f6af-488a-9684-f60ffe104434"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.601469 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data" (OuterVolumeSpecName: "config-data") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602001 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data\") pod \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602090 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") pod \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\" (UID: \"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602125 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbdwg\" (UniqueName: \"kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg\") pod \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602205 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxxt9\" (UniqueName: \"kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9\") pod \"cff112d3-3898-41c9-aab6-ed2e2a57a484\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602286 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data\") pod \"cff112d3-3898-41c9-aab6-ed2e2a57a484\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602355 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle\") pod \"cff112d3-3898-41c9-aab6-ed2e2a57a484\" (UID: \"cff112d3-3898-41c9-aab6-ed2e2a57a484\") " Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602450 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle\") pod \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\" (UID: \"f9ab0288-ae1d-41f7-954d-8cbadc83c614\") " Sep 30 19:53:46 crc kubenswrapper[4756]: W0930 19:53:46.602455 4756 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0/volumes/kubernetes.io~secret/config-data Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.602565 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data" (OuterVolumeSpecName: "config-data") pod "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" (UID: "6fbaa08d-11b7-4498-aeb2-e82ac13f64d0"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603227 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603262 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603272 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603283 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603293 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl47p\" (UniqueName: \"kubernetes.io/projected/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-kube-api-access-xl47p\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603302 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603313 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603337 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603347 4756 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8f3245-22a4-45a4-bbc4-33e1a078f92b-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603356 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/207e631b-3ef5-430f-bad1-4a9972dc8eb4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603364 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed122c3-f6af-488a-9684-f60ffe104434-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603372 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603380 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603389 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0befea8f-1ced-4703-b2a2-a10e154271f4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.603419 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.605670 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9" (OuterVolumeSpecName: "kube-api-access-fxxt9") pod "cff112d3-3898-41c9-aab6-ed2e2a57a484" (UID: "cff112d3-3898-41c9-aab6-ed2e2a57a484"). InnerVolumeSpecName "kube-api-access-fxxt9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.606641 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg" (OuterVolumeSpecName: "kube-api-access-bbdwg") pod "f9ab0288-ae1d-41f7-954d-8cbadc83c614" (UID: "f9ab0288-ae1d-41f7-954d-8cbadc83c614"). InnerVolumeSpecName "kube-api-access-bbdwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.625362 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cff112d3-3898-41c9-aab6-ed2e2a57a484" (UID: "cff112d3-3898-41c9-aab6-ed2e2a57a484"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.625362 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9ab0288-ae1d-41f7-954d-8cbadc83c614" (UID: "f9ab0288-ae1d-41f7-954d-8cbadc83c614"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.627941 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data" (OuterVolumeSpecName: "config-data") pod "cff112d3-3898-41c9-aab6-ed2e2a57a484" (UID: "cff112d3-3898-41c9-aab6-ed2e2a57a484"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.636729 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data" (OuterVolumeSpecName: "config-data") pod "f9ab0288-ae1d-41f7-954d-8cbadc83c614" (UID: "f9ab0288-ae1d-41f7-954d-8cbadc83c614"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704458 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704498 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbdwg\" (UniqueName: \"kubernetes.io/projected/f9ab0288-ae1d-41f7-954d-8cbadc83c614-kube-api-access-bbdwg\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704510 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxxt9\" (UniqueName: \"kubernetes.io/projected/cff112d3-3898-41c9-aab6-ed2e2a57a484-kube-api-access-fxxt9\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704519 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704528 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff112d3-3898-41c9-aab6-ed2e2a57a484-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:46 crc kubenswrapper[4756]: I0930 19:53:46.704536 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9ab0288-ae1d-41f7-954d-8cbadc83c614-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.128931 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" path="/var/lib/kubelet/pods/05ea7072-b87a-4cf4-8f86-f32e5836951a/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.130287 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" path="/var/lib/kubelet/pods/0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.130942 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13a44baa-047c-44d4-958e-c0d219d4f5a6" path="/var/lib/kubelet/pods/13a44baa-047c-44d4-958e-c0d219d4f5a6/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.132334 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="164b639f-c027-4486-a298-06e9510e14ac" path="/var/lib/kubelet/pods/164b639f-c027-4486-a298-06e9510e14ac/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.133053 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" path="/var/lib/kubelet/pods/28dfe26f-4d95-4def-9500-a0410c260cf2/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.133728 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dff94a7-1c27-4324-8e94-5a095f79eb9a" path="/var/lib/kubelet/pods/7dff94a7-1c27-4324-8e94-5a095f79eb9a/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.134682 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="859e275e-1d03-482b-aff6-8df9d80957fe" path="/var/lib/kubelet/pods/859e275e-1d03-482b-aff6-8df9d80957fe/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.135177 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" path="/var/lib/kubelet/pods/9996bc15-3d12-4aa1-a22e-d11726f6c4d8/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.135833 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" path="/var/lib/kubelet/pods/ce0daf51-f01f-4ace-bf9d-3dd2e2023618/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.136762 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3956c8b-1699-4cc4-8a97-b3ed35730374" path="/var/lib/kubelet/pods/e3956c8b-1699-4cc4-8a97-b3ed35730374/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.137254 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" path="/var/lib/kubelet/pods/e92dcf48-58c2-4c3e-907f-7fd43e23a424/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.137864 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" path="/var/lib/kubelet/pods/ebd344ad-8ef7-4a5c-8b74-1af845398b3b/volumes" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.162083 4756 generic.go:334] "Generic (PLEG): container finished" podID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerID="366cf47beb3bf314873bbe2aee65a48d7de8c63bb8eb30ed71c2c3e6ba1b8b24" exitCode=0 Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.162155 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerDied","Data":"366cf47beb3bf314873bbe2aee65a48d7de8c63bb8eb30ed71c2c3e6ba1b8b24"} Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.162185 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feac82fa-065f-4e9a-b649-fa26b2d150b8","Type":"ContainerDied","Data":"a50a0a986633a5f1ab92b36d62bd526748b09fae436384eae10f2977e3194410"} Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.162199 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a50a0a986633a5f1ab92b36d62bd526748b09fae436384eae10f2977e3194410" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.166752 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cff112d3-3898-41c9-aab6-ed2e2a57a484","Type":"ContainerDied","Data":"f12feea9e30642d994c691a0151a4c3fcfac76032b7e80ed89a0189682d60ea7"} Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.166807 4756 scope.go:117] "RemoveContainer" containerID="bceb1af41cc68214074dc394cef8fb93822b175f5fc10cf14701d779b481a5e1" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.166957 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.181411 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-684ccb4bd6-x97br" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.181483 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-684ccb4bd6-x97br" event={"ID":"6fbaa08d-11b7-4498-aeb2-e82ac13f64d0","Type":"ContainerDied","Data":"d1ba4efee963eace33036fd3dbc002cb7169e4269312a53dc8ccb5b7e91e4838"} Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.187143 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.187451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f9ab0288-ae1d-41f7-954d-8cbadc83c614","Type":"ContainerDied","Data":"89dbaff619c61b1a5dff07426f19a3166a124e9784c0732d4dac53a456b0ba9f"} Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.187578 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.188626 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.191485 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.191717 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.236210 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.246525 4756 scope.go:117] "RemoveContainer" containerID="207904952f6540ef119af443514c1298c2cf8341c4b2adc7b85d185c51cdf006" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.282830 4756 scope.go:117] "RemoveContainer" containerID="b64e18bd6132f9cc117785155229a1d2aa54ec5983a3ee45e5e205868af69f65" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.291765 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.309349 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-684ccb4bd6-x97br"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314585 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314618 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314697 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314722 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314750 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314769 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314786 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314802 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42dgk\" (UniqueName: \"kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.314826 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets\") pod \"feac82fa-065f-4e9a-b649-fa26b2d150b8\" (UID: \"feac82fa-065f-4e9a-b649-fa26b2d150b8\") " Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.315156 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.315198 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data podName:c3b1ef6d-6617-4d15-8709-a7623e75faa4 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:55.315185728 +0000 UTC m=+1364.936119205 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data") pod "rabbitmq-server-0" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4") : configmap "rabbitmq-config-data" not found Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.315258 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.315328 4756 scope.go:117] "RemoveContainer" containerID="8dbe31d5b2222309a75d567a85830fd5e0fbe8a178b7a9f99d3211c5bbf82f1c" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.315571 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.316100 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.317016 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.318970 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.327311 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.328521 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk" (OuterVolumeSpecName: "kube-api-access-42dgk") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "kube-api-access-42dgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.333569 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.341271 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.348167 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.348489 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.353514 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.360748 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets" (OuterVolumeSpecName: "secrets") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.362525 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.373009 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.379475 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.384356 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.389561 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.395097 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.398099 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "feac82fa-065f-4e9a-b649-fa26b2d150b8" (UID: "feac82fa-065f-4e9a-b649-fa26b2d150b8"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416566 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-default\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416594 4756 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416604 4756 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feac82fa-065f-4e9a-b649-fa26b2d150b8-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416612 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416620 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feac82fa-065f-4e9a-b649-fa26b2d150b8-config-data-generated\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416630 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42dgk\" (UniqueName: \"kubernetes.io/projected/feac82fa-065f-4e9a-b649-fa26b2d150b8-kube-api-access-42dgk\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416638 4756 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416646 4756 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/feac82fa-065f-4e9a-b649-fa26b2d150b8-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.416665 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.418643 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.431151 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.518760 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.545287 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.545680 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.545967 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.546019 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.546463 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.547907 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.549752 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.549806 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.695970 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f4c19f17-3c18-484d-acfd-99c72926bdef/ovn-northd/0.log" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.696037 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.824808 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvh5t\" (UniqueName: \"kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.824856 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.824882 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.824932 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.824983 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.825039 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.825068 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle\") pod \"f4c19f17-3c18-484d-acfd-99c72926bdef\" (UID: \"f4c19f17-3c18-484d-acfd-99c72926bdef\") " Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.825429 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.825812 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config" (OuterVolumeSpecName: "config") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.826493 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts" (OuterVolumeSpecName: "scripts") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.830594 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t" (OuterVolumeSpecName: "kube-api-access-vvh5t") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "kube-api-access-vvh5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.855012 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/keystone-6889fc7587-jlhbj" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.0.145:5000/v3\": read tcp 10.217.0.2:38824->10.217.0.145:5000: read: connection reset by peer" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.870839 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.902890 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.908521 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f4c19f17-3c18-484d-acfd-99c72926bdef" (UID: "f4c19f17-3c18-484d-acfd-99c72926bdef"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926746 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926783 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926799 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvh5t\" (UniqueName: \"kubernetes.io/projected/f4c19f17-3c18-484d-acfd-99c72926bdef-kube-api-access-vvh5t\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926813 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926825 4756 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c19f17-3c18-484d-acfd-99c72926bdef-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926838 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4c19f17-3c18-484d-acfd-99c72926bdef-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: I0930 19:53:47.926849 4756 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4c19f17-3c18-484d-acfd-99c72926bdef-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.927309 4756 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 19:53:47 crc kubenswrapper[4756]: E0930 19:53:47.927735 4756 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data podName:3bacc7b1-00f4-4489-9c0b-fa038cc443d6 nodeName:}" failed. No retries permitted until 2025-09-30 19:53:55.927519157 +0000 UTC m=+1365.548452634 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data") pod "rabbitmq-cell1-server-0" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6") : configmap "rabbitmq-cell1-config-data" not found
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.200933 4756 generic.go:334] "Generic (PLEG): container finished" podID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerID="941c1f888df56ddf960e94e48f17bb2adec0251bc3c8d8f1dd8e4c62d928796e" exitCode=0
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.201008 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6889fc7587-jlhbj" event={"ID":"4e0d0499-c40c-474b-83d5-f72ac55d859c","Type":"ContainerDied","Data":"941c1f888df56ddf960e94e48f17bb2adec0251bc3c8d8f1dd8e4c62d928796e"}
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203320 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f4c19f17-3c18-484d-acfd-99c72926bdef/ovn-northd/0.log"
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203364 4756 generic.go:334] "Generic (PLEG): container finished" podID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" exitCode=139
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203432 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerDied","Data":"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df"}
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203463 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f4c19f17-3c18-484d-acfd-99c72926bdef","Type":"ContainerDied","Data":"c3a68d5854b68b5f841c43337e1fbeecbe6820158c388a7e166ab069969d7462"}
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203486 4756 scope.go:117] "RemoveContainer" containerID="67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35"
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.203600 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.217906 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.243213 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.250946 4756 scope.go:117] "RemoveContainer" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df"
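
(Note: the nestedpendingoperations entry above is kubelet's per-volume retry damping: MountVolume.SetUp for rabbitmq-cell1-server-0 keeps failing because the rabbitmq-cell1-config-data configmap is gone, so the next attempt is pushed out by durationBeforeRetry 8s, with m=+1365.54... being the kubelet's monotonic uptime. An 8s gap is consistent with a delay that starts small and doubles per failure; under the assumed constants below it would be the fifth consecutive failure. The sketch shows that doubling policy only; the 500ms start and 2m cap are illustrative assumptions, not kubelet's exact values.)

// backoff.go -- sketch of a doubling retry delay like the
// "durationBeforeRetry 8s" entry above; constants are assumed.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // assumed initial delay
	const maxDelay = 2 * time.Minute // assumed cap on the delay
	for failure := 1; failure <= 9; failure++ {
		fmt.Printf("failure %d: no retries permitted for %v\n", failure, delay)
		delay *= 2 // double after every failed attempt
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
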
Need to start a new one" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.283547 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.311866 4756 scope.go:117] "RemoveContainer" containerID="67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.326010 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:53:48 crc kubenswrapper[4756]: E0930 19:53:48.326643 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35\": container with ID starting with 67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35 not found: ID does not exist" containerID="67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.326705 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35"} err="failed to get container status \"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35\": rpc error: code = NotFound desc = could not find container \"67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35\": container with ID starting with 67fa22128871f16827cabfb95e325f7d8e14ac3c57908b0201283ac2b4a62c35 not found: ID does not exist" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.326737 4756 scope.go:117] "RemoveContainer" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" Sep 30 19:53:48 crc kubenswrapper[4756]: E0930 19:53:48.327127 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df\": container with ID starting with 665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df not found: ID does not exist" containerID="665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.327146 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df"} err="failed to get container status \"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df\": rpc error: code = NotFound desc = could not find container \"665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df\": container with ID starting with 665a9d7a98e4cec82848a7a7dd2273a052bb65806f4b6908a9c7dcf1d34d25df not found: ID does not exist" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.331766 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436366 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436448 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436476 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gv9l\" (UniqueName: \"kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436496 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436522 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436579 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436606 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.436637 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle\") pod \"4e0d0499-c40c-474b-83d5-f72ac55d859c\" (UID: \"4e0d0499-c40c-474b-83d5-f72ac55d859c\") " Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.445576 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.445650 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts" (OuterVolumeSpecName: "scripts") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.445988 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.455432 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l" (OuterVolumeSpecName: "kube-api-access-4gv9l") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "kube-api-access-4gv9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.469798 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.470853 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data" (OuterVolumeSpecName: "config-data") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.485300 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.491280 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e0d0499-c40c-474b-83d5-f72ac55d859c" (UID: "4e0d0499-c40c-474b-83d5-f72ac55d859c"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538667 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538692 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538703 4756 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538711 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538720 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gv9l\" (UniqueName: \"kubernetes.io/projected/4e0d0499-c40c-474b-83d5-f72ac55d859c-kube-api-access-4gv9l\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538728 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538735 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.538744 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e0d0499-c40c-474b-83d5-f72ac55d859c-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.548323 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.104:5671: connect: connection refused" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.895602 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused" Sep 30 19:53:48 crc kubenswrapper[4756]: I0930 19:53:48.916775 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.045050 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.045281 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.045317 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.045343 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.045377 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046245 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046330 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046351 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046369 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046414 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2n7d\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: 
\"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.046455 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls\") pod \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\" (UID: \"c3b1ef6d-6617-4d15-8709-a7623e75faa4\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.047801 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.048057 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.048777 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.051070 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.051188 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.052732 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d" (OuterVolumeSpecName: "kube-api-access-w2n7d") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "kube-api-access-w2n7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.056545 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info" (OuterVolumeSpecName: "pod-info") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.057004 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.076501 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data" (OuterVolumeSpecName: "config-data") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.084006 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf" (OuterVolumeSpecName: "server-conf") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.131870 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" path="/var/lib/kubelet/pods/0befea8f-1ced-4703-b2a2-a10e154271f4/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.132720 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" path="/var/lib/kubelet/pods/207e631b-3ef5-430f-bad1-4a9972dc8eb4/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.133352 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" path="/var/lib/kubelet/pods/5d8f3245-22a4-45a4-bbc4-33e1a078f92b/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.134551 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" path="/var/lib/kubelet/pods/6fbaa08d-11b7-4498-aeb2-e82ac13f64d0/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.135029 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" path="/var/lib/kubelet/pods/cff112d3-3898-41c9-aab6-ed2e2a57a484/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.135596 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eed122c3-f6af-488a-9684-f60ffe104434" path="/var/lib/kubelet/pods/eed122c3-f6af-488a-9684-f60ffe104434/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.136699 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" path="/var/lib/kubelet/pods/f4c19f17-3c18-484d-acfd-99c72926bdef/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.137250 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" path="/var/lib/kubelet/pods/f9ab0288-ae1d-41f7-954d-8cbadc83c614/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.138484 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" 
path="/var/lib/kubelet/pods/feac82fa-065f-4e9a-b649-fa26b2d150b8/volumes" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.140542 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c3b1ef6d-6617-4d15-8709-a7623e75faa4" (UID: "c3b1ef6d-6617-4d15-8709-a7623e75faa4"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147786 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147807 4756 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3b1ef6d-6617-4d15-8709-a7623e75faa4-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147831 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147841 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2n7d\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-kube-api-access-w2n7d\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147851 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147859 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147868 4756 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147876 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147884 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3b1ef6d-6617-4d15-8709-a7623e75faa4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147891 4756 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3b1ef6d-6617-4d15-8709-a7623e75faa4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.147900 4756 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3b1ef6d-6617-4d15-8709-a7623e75faa4-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.163524 4756 
operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.229977 4756 generic.go:334] "Generic (PLEG): container finished" podID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerID="22acee4e4b450fbce8e6aa4569d2260f650fed76f5340a9472eecc6bc744543f" exitCode=0 Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.230072 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerDied","Data":"22acee4e4b450fbce8e6aa4569d2260f650fed76f5340a9472eecc6bc744543f"} Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.232721 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6889fc7587-jlhbj" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.232701 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6889fc7587-jlhbj" event={"ID":"4e0d0499-c40c-474b-83d5-f72ac55d859c","Type":"ContainerDied","Data":"4d75a894500d1a1a588e5f2969ca4eb270930f2f5199afa2c832709272569525"} Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.232862 4756 scope.go:117] "RemoveContainer" containerID="941c1f888df56ddf960e94e48f17bb2adec0251bc3c8d8f1dd8e4c62d928796e" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.238343 4756 generic.go:334] "Generic (PLEG): container finished" podID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerID="d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa" exitCode=0 Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.238440 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerDied","Data":"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa"} Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.238473 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c3b1ef6d-6617-4d15-8709-a7623e75faa4","Type":"ContainerDied","Data":"ef3b11605b90df6591e1850d9530a4e2bd8e4b06f92d8da767ef0f1d015678c9"} Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.238541 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.252570 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.260408 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.268298 4756 scope.go:117] "RemoveContainer" containerID="d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.268769 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6889fc7587-jlhbj"] Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.296592 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.301949 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.309314 4756 scope.go:117] "RemoveContainer" containerID="d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.324211 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.332812 4756 scope.go:117] "RemoveContainer" containerID="d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa" Sep 30 19:53:49 crc kubenswrapper[4756]: E0930 19:53:49.333132 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa\": container with ID starting with d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa not found: ID does not exist" containerID="d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.333161 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa"} err="failed to get container status \"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa\": rpc error: code = NotFound desc = could not find container \"d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa\": container with ID starting with d30c7480fbfaf752f8c10fcadf2dff0b40a331d23daaf5997b23b75ef5b2aafa not found: ID does not exist" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.333180 4756 scope.go:117] "RemoveContainer" containerID="d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60" Sep 30 19:53:49 crc kubenswrapper[4756]: E0930 19:53:49.333433 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60\": container with ID starting with d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60 not found: ID does not exist" containerID="d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.333454 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60"} err="failed to get container status \"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60\": rpc error: code = NotFound desc = could not find container \"d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60\": container with ID starting with d8b917c3d73dba41be8fa6de0c7e43dda4897a0fa616dbea5cb7b4cf69c7fa60 not found: ID does not exist" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.455545 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.455903 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.455940 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.455981 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456103 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456134 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456172 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkdbg\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456243 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456275 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456299 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456355 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\" (UID: \"3bacc7b1-00f4-4489-9c0b-fa038cc443d6\") " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456649 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.456917 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.457082 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.460547 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg" (OuterVolumeSpecName: "kube-api-access-bkdbg") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "kube-api-access-bkdbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.461193 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info" (OuterVolumeSpecName: "pod-info") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.461210 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.461311 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.471958 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.488909 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data" (OuterVolumeSpecName: "config-data") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.527686 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf" (OuterVolumeSpecName: "server-conf") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.552581 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3bacc7b1-00f4-4489-9c0b-fa038cc443d6" (UID: "3bacc7b1-00f4-4489-9c0b-fa038cc443d6"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557717 4756 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557766 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557787 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557805 4756 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557822 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557838 4756 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557856 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkdbg\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-kube-api-access-bkdbg\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557871 4756 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557887 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557903 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3bacc7b1-00f4-4489-9c0b-fa038cc443d6-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.557953 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.572210 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Sep 30 19:53:49 crc kubenswrapper[4756]: I0930 19:53:49.662018 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.090751 4756 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.168542 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data\") pod \"0511d760-5c65-453b-aefd-89ebb9a52ac6\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.168612 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tcgf\" (UniqueName: \"kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf\") pod \"0511d760-5c65-453b-aefd-89ebb9a52ac6\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.168668 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom\") pod \"0511d760-5c65-453b-aefd-89ebb9a52ac6\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.168747 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs\") pod \"0511d760-5c65-453b-aefd-89ebb9a52ac6\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.168789 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle\") pod \"0511d760-5c65-453b-aefd-89ebb9a52ac6\" (UID: \"0511d760-5c65-453b-aefd-89ebb9a52ac6\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.169192 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs" (OuterVolumeSpecName: "logs") pod "0511d760-5c65-453b-aefd-89ebb9a52ac6" (UID: "0511d760-5c65-453b-aefd-89ebb9a52ac6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.174602 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0511d760-5c65-453b-aefd-89ebb9a52ac6" (UID: "0511d760-5c65-453b-aefd-89ebb9a52ac6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.174606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf" (OuterVolumeSpecName: "kube-api-access-5tcgf") pod "0511d760-5c65-453b-aefd-89ebb9a52ac6" (UID: "0511d760-5c65-453b-aefd-89ebb9a52ac6"). InnerVolumeSpecName "kube-api-access-5tcgf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.196032 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0511d760-5c65-453b-aefd-89ebb9a52ac6" (UID: "0511d760-5c65-453b-aefd-89ebb9a52ac6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.201189 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.211723 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data" (OuterVolumeSpecName: "config-data") pod "0511d760-5c65-453b-aefd-89ebb9a52ac6" (UID: "0511d760-5c65-453b-aefd-89ebb9a52ac6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.249238 4756 generic.go:334] "Generic (PLEG): container finished" podID="3c14829a-8f5d-490d-a922-719d81da47a4" containerID="46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458" exitCode=0 Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.249519 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-bbf58c-78vxt" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.249517 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerDied","Data":"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458"} Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.249803 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-bbf58c-78vxt" event={"ID":"3c14829a-8f5d-490d-a922-719d81da47a4","Type":"ContainerDied","Data":"fb78833292dfd2206bd7a9a24184bb9c1a0ea31bd2ada4020993f68b93f2d03f"} Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.249826 4756 scope.go:117] "RemoveContainer" containerID="46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.253142 4756 generic.go:334] "Generic (PLEG): container finished" podID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerID="5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff" exitCode=0 Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.253258 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.253324 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerDied","Data":"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff"} Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.253354 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b959497f4-7qz6r" event={"ID":"0511d760-5c65-453b-aefd-89ebb9a52ac6","Type":"ContainerDied","Data":"5a135196ad107e441834a5114f959ba7f203574a4bd58c03d661dcc56a67f77f"} Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.257145 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3bacc7b1-00f4-4489-9c0b-fa038cc443d6","Type":"ContainerDied","Data":"5087335d4f38bbc0625718888a0e022f4788c689a1bc5a3b239ad5a0644a309e"} Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.257205 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.270150 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.270181 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0511d760-5c65-453b-aefd-89ebb9a52ac6-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.270192 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.270203 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0511d760-5c65-453b-aefd-89ebb9a52ac6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.270226 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tcgf\" (UniqueName: \"kubernetes.io/projected/0511d760-5c65-453b-aefd-89ebb9a52ac6-kube-api-access-5tcgf\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.273607 4756 scope.go:117] "RemoveContainer" containerID="baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.301215 4756 scope.go:117] "RemoveContainer" containerID="46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458" Sep 30 19:53:50 crc kubenswrapper[4756]: E0930 19:53:50.301680 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458\": container with ID starting with 46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458 not found: ID does not exist" containerID="46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.301714 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458"} err="failed to get container status \"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458\": rpc error: code = NotFound desc = could not find container \"46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458\": container with ID starting with 46a6dadf1f6a1d4672d2a84013aec14bd516d6c5c4b16525937c9d80c7745458 not found: ID does not exist" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.301738 4756 scope.go:117] "RemoveContainer" containerID="baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6" Sep 30 19:53:50 crc kubenswrapper[4756]: E0930 19:53:50.301955 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6\": container with ID starting with baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6 not found: ID does not exist" containerID="baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.301981 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6"} err="failed to get container status \"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6\": rpc error: code = NotFound desc = could not find container \"baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6\": container with ID starting with baa07f784aff7642d4619ac09ef1f58c98f9a2e756003ce12cf3674dba0d7ec6 not found: ID does not exist" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.301998 4756 scope.go:117] "RemoveContainer" containerID="5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.302542 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.313280 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.318255 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.322130 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-7b959497f4-7qz6r"] Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.324940 4756 scope.go:117] "RemoveContainer" containerID="9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.343801 4756 scope.go:117] "RemoveContainer" containerID="5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff" Sep 30 19:53:50 crc kubenswrapper[4756]: E0930 19:53:50.344346 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff\": container with ID starting with 5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff not found: ID does not exist" containerID="5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.344426 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff"} err="failed to get container status \"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff\": rpc error: code = NotFound desc = could not find container \"5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff\": container with ID starting with 5a28b01b0fd0453603e79634b63f1a8bc7b5df27306c20e99103f36e06cb4aff not found: ID does not exist" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.344460 4756 scope.go:117] "RemoveContainer" containerID="9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4" Sep 30 19:53:50 crc kubenswrapper[4756]: E0930 19:53:50.344925 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4\": container with ID starting with 9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4 not found: ID does not exist" containerID="9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.345102 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4"} err="failed to get container status \"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4\": rpc error: code = NotFound desc = could not find container \"9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4\": container with ID starting with 9ba683265e05ae2f501e9cd38e1f199da027465720a175466d77105b0eedb2f4 not found: ID does not exist" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.345138 4756 scope.go:117] "RemoveContainer" containerID="22acee4e4b450fbce8e6aa4569d2260f650fed76f5340a9472eecc6bc744543f" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.367107 4756 scope.go:117] "RemoveContainer" containerID="3b361be86fa33c2d42005a3a29c1306df75cc2f04957244da9accd2d1ab215f9" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.371110 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnmx8\" (UniqueName: \"kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8\") pod \"3c14829a-8f5d-490d-a922-719d81da47a4\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.371230 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs\") pod \"3c14829a-8f5d-490d-a922-719d81da47a4\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.371265 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle\") pod \"3c14829a-8f5d-490d-a922-719d81da47a4\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.371296 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data\") pod \"3c14829a-8f5d-490d-a922-719d81da47a4\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.371315 4756 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom\") pod \"3c14829a-8f5d-490d-a922-719d81da47a4\" (UID: \"3c14829a-8f5d-490d-a922-719d81da47a4\") " Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.373071 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs" (OuterVolumeSpecName: "logs") pod "3c14829a-8f5d-490d-a922-719d81da47a4" (UID: "3c14829a-8f5d-490d-a922-719d81da47a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.374657 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8" (OuterVolumeSpecName: "kube-api-access-gnmx8") pod "3c14829a-8f5d-490d-a922-719d81da47a4" (UID: "3c14829a-8f5d-490d-a922-719d81da47a4"). InnerVolumeSpecName "kube-api-access-gnmx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.375456 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3c14829a-8f5d-490d-a922-719d81da47a4" (UID: "3c14829a-8f5d-490d-a922-719d81da47a4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.396564 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c14829a-8f5d-490d-a922-719d81da47a4" (UID: "3c14829a-8f5d-490d-a922-719d81da47a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.412006 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data" (OuterVolumeSpecName: "config-data") pod "3c14829a-8f5d-490d-a922-719d81da47a4" (UID: "3c14829a-8f5d-490d-a922-719d81da47a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.472488 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c14829a-8f5d-490d-a922-719d81da47a4-logs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.472518 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.472533 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.472544 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c14829a-8f5d-490d-a922-719d81da47a4-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.472555 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnmx8\" (UniqueName: \"kubernetes.io/projected/3c14829a-8f5d-490d-a922-719d81da47a4-kube-api-access-gnmx8\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.598119 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:53:50 crc kubenswrapper[4756]: I0930 19:53:50.608968 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-bbf58c-78vxt"] Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.051016 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.136927 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" path="/var/lib/kubelet/pods/0511d760-5c65-453b-aefd-89ebb9a52ac6/volumes" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.139586 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" path="/var/lib/kubelet/pods/3bacc7b1-00f4-4489-9c0b-fa038cc443d6/volumes" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.140429 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" path="/var/lib/kubelet/pods/3c14829a-8f5d-490d-a922-719d81da47a4/volumes" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.141750 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" path="/var/lib/kubelet/pods/4e0d0499-c40c-474b-83d5-f72ac55d859c/volumes" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.143160 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" path="/var/lib/kubelet/pods/c3b1ef6d-6617-4d15-8709-a7623e75faa4/volumes" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.193779 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.193909 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t229\" (UniqueName: \"kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.193944 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.194007 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.194038 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.194065 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.194148 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.194215 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle\") pod \"78518120-da64-4b55-9dce-b7412b16bed3\" (UID: \"78518120-da64-4b55-9dce-b7412b16bed3\") " Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.195607 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.196113 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.200548 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts" (OuterVolumeSpecName: "scripts") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.215566 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229" (OuterVolumeSpecName: "kube-api-access-2t229") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "kube-api-access-2t229". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.219046 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.253561 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.274842 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.276312 4756 generic.go:334] "Generic (PLEG): container finished" podID="78518120-da64-4b55-9dce-b7412b16bed3" containerID="db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530" exitCode=0 Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.276588 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.276651 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerDied","Data":"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530"} Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.276829 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"78518120-da64-4b55-9dce-b7412b16bed3","Type":"ContainerDied","Data":"e4fb0c80e0c096f0d548c2c8e2360463ccf8846baf5adfa6abdfcf6710feb13b"} Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.276889 4756 scope.go:117] "RemoveContainer" containerID="8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.279665 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data" (OuterVolumeSpecName: "config-data") pod "78518120-da64-4b55-9dce-b7412b16bed3" (UID: "78518120-da64-4b55-9dce-b7412b16bed3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296129 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296308 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296384 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t229\" (UniqueName: \"kubernetes.io/projected/78518120-da64-4b55-9dce-b7412b16bed3-kube-api-access-2t229\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296574 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296650 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296719 4756 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296785 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/78518120-da64-4b55-9dce-b7412b16bed3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.296871 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/78518120-da64-4b55-9dce-b7412b16bed3-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.300744 4756 scope.go:117] "RemoveContainer" containerID="8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.329291 4756 scope.go:117] "RemoveContainer" containerID="db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.345982 4756 scope.go:117] "RemoveContainer" containerID="22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.364152 4756 scope.go:117] "RemoveContainer" containerID="8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f" Sep 30 19:53:51 crc kubenswrapper[4756]: E0930 19:53:51.364487 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f\": container with ID starting with 8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f not found: ID does not exist" containerID="8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.364521 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f"} err="failed to get container status \"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f\": rpc error: code = NotFound desc = could not find container \"8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f\": container with ID starting with 8446379b5b8e3c30e3501dc472fc67b7b914bcaff0ef7dc5380700b34b1f393f not found: ID does not exist" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.364544 4756 scope.go:117] "RemoveContainer" containerID="8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9" Sep 30 19:53:51 crc kubenswrapper[4756]: E0930 19:53:51.364952 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9\": container with ID starting with 8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9 not found: ID does not exist" containerID="8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.364977 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9"} err="failed to get container status \"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9\": rpc error: code = NotFound desc = could not find container \"8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9\": container with ID starting with 8dcf655a6b6e1186f83f889c32c480afbeb2d95de763becd9d328651e8b7a7c9 not found: ID does not exist" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.364993 4756 scope.go:117] "RemoveContainer" 
containerID="db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530" Sep 30 19:53:51 crc kubenswrapper[4756]: E0930 19:53:51.365229 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530\": container with ID starting with db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530 not found: ID does not exist" containerID="db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.365253 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530"} err="failed to get container status \"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530\": rpc error: code = NotFound desc = could not find container \"db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530\": container with ID starting with db3913fcdfdf7d832fb8669bd6c097b34f4a8d5f1bdafc8f900c161696611530 not found: ID does not exist" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.365271 4756 scope.go:117] "RemoveContainer" containerID="22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e" Sep 30 19:53:51 crc kubenswrapper[4756]: E0930 19:53:51.365545 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e\": container with ID starting with 22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e not found: ID does not exist" containerID="22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.365572 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e"} err="failed to get container status \"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e\": rpc error: code = NotFound desc = could not find container \"22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e\": container with ID starting with 22b219bf8630c672d661ef7fab54e4eb8a02542b53a085535c6963dd7177287e not found: ID does not exist" Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.625506 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:53:51 crc kubenswrapper[4756]: I0930 19:53:51.630318 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.544917 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.545645 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.546035 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.546146 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.546681 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.548448 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.550622 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:52 crc kubenswrapper[4756]: E0930 19:53:52.550676 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:53:53 crc kubenswrapper[4756]: I0930 19:53:53.070146 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-d4df5d75-8jctn" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.163:9696/\": dial tcp 10.217.0.163:9696: connect: connection refused" Sep 30 19:53:53 crc kubenswrapper[4756]: I0930 19:53:53.138811 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78518120-da64-4b55-9dce-b7412b16bed3" path="/var/lib/kubelet/pods/78518120-da64-4b55-9dce-b7412b16bed3/volumes" Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.546032 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" 
containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.546583 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.547468 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.548092 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.548201 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.548893 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.550936 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:53:57 crc kubenswrapper[4756]: E0930 19:53:57.550997 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.544384 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.545746 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.546281 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.546335 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.547504 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.549750 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.552226 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:02 crc kubenswrapper[4756]: E0930 19:54:02.552538 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.114917 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216335 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmhvz\" (UniqueName: \"kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216409 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216430 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216483 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216499 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216524 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.216558 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config\") pod \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\" (UID: \"9dead4ff-0706-4a5e-b6ae-c388ae1c2313\") " Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.221755 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.232902 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz" (OuterVolumeSpecName: "kube-api-access-xmhvz") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "kube-api-access-xmhvz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.261981 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.271233 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.273675 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.278326 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config" (OuterVolumeSpecName: "config") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.290468 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "9dead4ff-0706-4a5e-b6ae-c388ae1c2313" (UID: "9dead4ff-0706-4a5e-b6ae-c388ae1c2313"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318467 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmhvz\" (UniqueName: \"kubernetes.io/projected/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-kube-api-access-xmhvz\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318507 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318523 4756 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318536 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318548 4756 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318560 4756 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.318572 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dead4ff-0706-4a5e-b6ae-c388ae1c2313-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.435866 4756 generic.go:334] "Generic (PLEG): container finished" podID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerID="900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c" exitCode=0 Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.435910 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerDied","Data":"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c"} Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.435938 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d4df5d75-8jctn" event={"ID":"9dead4ff-0706-4a5e-b6ae-c388ae1c2313","Type":"ContainerDied","Data":"25c76c73cfb9fb2968a5124f862e69e4d34b8f3323caca226d9c67b8fa92fac5"} Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.435946 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d4df5d75-8jctn" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.435959 4756 scope.go:117] "RemoveContainer" containerID="9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.468748 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.474241 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-d4df5d75-8jctn"] Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.479140 4756 scope.go:117] "RemoveContainer" containerID="900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.501002 4756 scope.go:117] "RemoveContainer" containerID="9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4" Sep 30 19:54:04 crc kubenswrapper[4756]: E0930 19:54:04.501606 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4\": container with ID starting with 9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4 not found: ID does not exist" containerID="9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.501639 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4"} err="failed to get container status \"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4\": rpc error: code = NotFound desc = could not find container \"9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4\": container with ID starting with 9a4fb3358fd6f28ed48e21d357f0e517d47e56682744c7e3918edea524d6cfd4 not found: ID does not exist" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.501660 4756 scope.go:117] "RemoveContainer" containerID="900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c" Sep 30 19:54:04 crc kubenswrapper[4756]: E0930 19:54:04.502083 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c\": container with ID starting with 900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c not found: ID does not exist" containerID="900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c" Sep 30 19:54:04 crc kubenswrapper[4756]: I0930 19:54:04.502179 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c"} err="failed to get container status \"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c\": rpc error: code = NotFound desc = could not find container \"900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c\": container with ID starting with 900bdddf277fe7986f2ca62e27752937a1ace2d13997fb2c03e798ffa202304c not found: ID does not exist" Sep 30 19:54:05 crc kubenswrapper[4756]: I0930 19:54:05.132614 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" path="/var/lib/kubelet/pods/9dead4ff-0706-4a5e-b6ae-c388ae1c2313/volumes" Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.544243 4756 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.546391 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.546474 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.547270 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.547325 4756 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.548467 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.551267 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 19:54:07 crc kubenswrapper[4756]: E0930 19:54:07.551319 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-kmbtz" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.529664 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-kmbtz_cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed/ovs-vswitchd/0.log" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.530823 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" exitCode=137 Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.530899 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerDied","Data":"291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea"} Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.544575 4756 generic.go:334] "Generic (PLEG): container finished" podID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerID="920c7a436f02252c0b5d507de8037ccd6743d1c2a255a147a18b894f28b926a4" exitCode=137 Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.544613 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"920c7a436f02252c0b5d507de8037ccd6743d1c2a255a147a18b894f28b926a4"} Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.835362 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kmbtz_cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed/ovs-vswitchd/0.log" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.836381 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.845309 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.938997 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939168 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmxc7\" (UniqueName: \"kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939194 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939282 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939348 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run" (OuterVolumeSpecName: "var-run") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). 
InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939416 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939446 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5zm2\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2\") pod \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939522 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock\") pod \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939545 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.939691 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib" (OuterVolumeSpecName: "var-lib") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940104 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock" (OuterVolumeSpecName: "lock") pod "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940234 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache\") pod \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940270 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940338 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log\") pod \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\" (UID: \"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940361 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") pod \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\" (UID: \"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f\") " Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940465 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log" (OuterVolumeSpecName: "var-log") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940640 4756 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-log\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940656 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940666 4756 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-etc-ovs\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940675 4756 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-lock\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940688 4756 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-var-lib\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.940668 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts" (OuterVolumeSpecName: "scripts") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.941019 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache" (OuterVolumeSpecName: "cache") pod "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.944794 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.944801 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2" (OuterVolumeSpecName: "kube-api-access-c5zm2") pod "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f"). InnerVolumeSpecName "kube-api-access-c5zm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.944876 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7" (OuterVolumeSpecName: "kube-api-access-nmxc7") pod "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" (UID: "cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed"). InnerVolumeSpecName "kube-api-access-nmxc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:54:11 crc kubenswrapper[4756]: I0930 19:54:11.945142 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" (UID: "26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041877 4756 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-cache\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041926 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041936 4756 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041946 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041956 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmxc7\" (UniqueName: \"kubernetes.io/projected/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed-kube-api-access-nmxc7\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.041965 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5zm2\" (UniqueName: \"kubernetes.io/projected/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f-kube-api-access-c5zm2\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.054895 4756 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.142960 4756 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.561070 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kmbtz_cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed/ovs-vswitchd/0.log" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.562666 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-kmbtz" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.562664 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kmbtz" event={"ID":"cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed","Type":"ContainerDied","Data":"3a56a47b8c0530230e171fc2e4a85c823b8e1fe358dbf42bcc9b47456600499b"} Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.562752 4756 scope.go:117] "RemoveContainer" containerID="291eb080a7f4ba699c9eeb5b409724fd85cb98d31853893faa9a4c9845063aea" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.576764 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f","Type":"ContainerDied","Data":"8aa879b20f8157c2cc1b3bc2fc2c825aee61fad6e16fc83bcb4862947fe2fcce"} Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.577052 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.598391 4756 scope.go:117] "RemoveContainer" containerID="223b5334941e108a9544fa45c9f6b28cb4f077714ac53ffb084467fb3d266eb6" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.623549 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.637743 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-kmbtz"] Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.649828 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.654169 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.656873 4756 scope.go:117] "RemoveContainer" containerID="72c557f5ab2787a56fb20fd470b72d16e6f45d14b19c838d4ca4877b21ee5f12" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.687263 4756 scope.go:117] "RemoveContainer" containerID="920c7a436f02252c0b5d507de8037ccd6743d1c2a255a147a18b894f28b926a4" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.711033 4756 scope.go:117] "RemoveContainer" containerID="762a83f21b826aa3eb53b3ebd785e06362136c9dadaba330d0843609fc9c2284" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.737984 4756 scope.go:117] "RemoveContainer" containerID="51f94be61ce4fe7988183466f20b547fdd3570bb129d81fb1a0d14b57bcbd868" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.754654 4756 scope.go:117] "RemoveContainer" containerID="c7c8e729cceb6e35134441b29a4688dd7e5d7409c1ff4e605c6736fd89e5f899" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.776943 4756 scope.go:117] "RemoveContainer" containerID="5434ba65bcd1a16d1031ac5831c922a2bfb696f3b7f8c608c5e3fbd5cff3290f" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.796418 4756 scope.go:117] "RemoveContainer" containerID="c9e0fb72308640c83674dc311f125ea56a518ff2b747a1f3595f7eb4f02f32bd" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.816689 4756 scope.go:117] "RemoveContainer" containerID="9a23a18c16fa569d73aa74575f47acbf5abd74569f207256e830e5ae1207a4c1" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.839936 4756 scope.go:117] "RemoveContainer" containerID="d845f4ce57e99f7f80a72593e5611ab764740d01bc79f238f2095047d2ba9939" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.860290 4756 scope.go:117] "RemoveContainer" containerID="c1034fd69491ceca2b5a5e3609ed94e95183c5cd8752878c0eb7d55136e8ac9b" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.878228 4756 scope.go:117] "RemoveContainer" containerID="9d88cf8ad00002797476e74b41a910b1b6ec952d10c00335bb0abfc6bd17fddd" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.895553 4756 scope.go:117] "RemoveContainer" containerID="db446a0e9626857a987217e5f70793bfbb0795d88490d45a933892f52b588044" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.909775 4756 scope.go:117] "RemoveContainer" containerID="4b05a265d73c3a7ceb454829ebf559c526823bafbc3ff9d5547722704959942d" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.926066 4756 scope.go:117] "RemoveContainer" containerID="28e0221dbc10ee875c0d1d749687dc2243eba9bd6154bd2190340276bba9e102" Sep 30 19:54:12 crc kubenswrapper[4756]: I0930 19:54:12.946566 4756 scope.go:117] "RemoveContainer" containerID="e54877dad5935cd8933ec14fc3b8e4b0d87beeda887453f4702efc3c346c99e8" Sep 30 
Sep 30 19:54:13 crc kubenswrapper[4756]: I0930 19:54:13.135330 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" path="/var/lib/kubelet/pods/26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f/volumes"
Sep 30 19:54:13 crc kubenswrapper[4756]: I0930 19:54:13.139576 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" path="/var/lib/kubelet/pods/cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed/volumes"
Sep 30 19:54:15 crc kubenswrapper[4756]: I0930 19:54:15.511805 4756 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod6f3ed405-3d2e-4296-8ea5-7b426f2bd987"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod6f3ed405-3d2e-4296-8ea5-7b426f2bd987] : Timed out while waiting for systemd to remove kubepods-besteffort-pod6f3ed405_3d2e_4296_8ea5_7b426f2bd987.slice"
Sep 30 19:54:15 crc kubenswrapper[4756]: E0930 19:54:15.512187 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod6f3ed405-3d2e-4296-8ea5-7b426f2bd987] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod6f3ed405-3d2e-4296-8ea5-7b426f2bd987] : Timed out while waiting for systemd to remove kubepods-besteffort-pod6f3ed405_3d2e_4296_8ea5_7b426f2bd987.slice" pod="openstack/nova-cell1-novncproxy-0" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987"
Sep 30 19:54:15 crc kubenswrapper[4756]: I0930 19:54:15.622559 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 19:54:15 crc kubenswrapper[4756]: I0930 19:54:15.627320 4756 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7cba2998-4911-447e-8fa8-782a83f0b3be"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7cba2998-4911-447e-8fa8-782a83f0b3be] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7cba2998_4911_447e_8fa8_782a83f0b3be.slice"
Sep 30 19:54:15 crc kubenswrapper[4756]: E0930 19:54:15.627358 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod7cba2998-4911-447e-8fa8-782a83f0b3be] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod7cba2998-4911-447e-8fa8-782a83f0b3be] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7cba2998_4911_447e_8fa8_782a83f0b3be.slice" pod="openstack/cinder-scheduler-0" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be"
Sep 30 19:54:15 crc kubenswrapper[4756]: I0930 19:54:15.639615 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 19:54:15 crc kubenswrapper[4756]: I0930 19:54:15.644101 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133222 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"]
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133585 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" containerName="memcached"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133600 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" containerName="memcached"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133611 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="dnsmasq-dns"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133619 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="dnsmasq-dns"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133635 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133644 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133659 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="ovsdbserver-nb"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133666 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="ovsdbserver-nb"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133677 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-updater"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133684 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-updater"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133697 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="setup-container"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133705 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="setup-container"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133720 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133728 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133742 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="probe"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133750 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="probe"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133764 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133773 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133785 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="859e275e-1d03-482b-aff6-8df9d80957fe" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133792 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="859e275e-1d03-482b-aff6-8df9d80957fe" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133805 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133813 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133832 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133842 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133857 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133866 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133878 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerName="galera"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133887 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerName="galera"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133897 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="init"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133905 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="init"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133916 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133925 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-httpd"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133944 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133953 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133970 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-replicator"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.133979 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-replicator"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.133992 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134002 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134021 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3956c8b-1699-4cc4-8a97-b3ed35730374" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134030 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3956c8b-1699-4cc4-8a97-b3ed35730374" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134046 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134055 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134074 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-reaper"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134083 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-reaper"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134100 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-expirer"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134109 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-expirer"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134122 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-auditor"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134132 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-auditor"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134149 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-updater"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134159 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-updater"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134174 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134184 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134200 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="openstack-network-exporter"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134212 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="openstack-network-exporter"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134225 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="sg-core"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134232 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="sg-core"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134246 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server-init"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134255 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server-init"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134270 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="rsync"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134278 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="rsync"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134288 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134296 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134308 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134315 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134325 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="rabbitmq"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134334 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="rabbitmq"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134344 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134351 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134363 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="openstack-network-exporter"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134372 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="openstack-network-exporter"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134380 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dff94a7-1c27-4324-8e94-5a095f79eb9a" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134388 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dff94a7-1c27-4324-8e94-5a095f79eb9a" containerName="mariadb-account-delete"
Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134455 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-log"
Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134466 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-log"
state_mem.go:107] "Deleted CPUSet assignment" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-log" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134476 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a44baa-047c-44d4-958e-c0d219d4f5a6" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134486 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a44baa-047c-44d4-958e-c0d219d4f5a6" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134500 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="mysql-bootstrap" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134507 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="mysql-bootstrap" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134516 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="cinder-scheduler" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134524 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="cinder-scheduler" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134532 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134539 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134549 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134557 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134570 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="galera" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134578 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="galera" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134590 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-central-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134598 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-central-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134608 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164b639f-c027-4486-a298-06e9510e14ac" containerName="nova-cell1-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134616 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="164b639f-c027-4486-a298-06e9510e14ac" containerName="nova-cell1-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134625 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" 
containerName="mysql-bootstrap" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134632 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerName="mysql-bootstrap" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134641 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134649 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134661 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134670 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134679 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9466ec-a908-431a-9c47-fda720095cd8" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134690 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9466ec-a908-431a-9c47-fda720095cd8" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134701 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134709 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134721 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134728 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134742 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134749 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134764 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="ovsdbserver-sb" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134771 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="ovsdbserver-sb" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134782 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="setup-container" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134790 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="setup-container" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134804 4756 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" containerName="kube-state-metrics" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134811 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" containerName="kube-state-metrics" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134819 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134826 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134838 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerName="keystone-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134845 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerName="keystone-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134855 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134862 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener-log" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134873 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerName="nova-scheduler-scheduler" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134880 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerName="nova-scheduler-scheduler" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134893 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134900 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134913 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134922 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134934 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134943 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-server" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.134959 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134968 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api-log" Sep 30 19:54:16 crc kubenswrapper[4756]: 
E0930 19:54:16.134981 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="rabbitmq" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.134989 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="rabbitmq" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135001 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135008 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135022 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-notification-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135029 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-notification-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135044 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135053 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135067 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135075 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135086 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="swift-recon-cron" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135093 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="swift-recon-cron" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135107 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135114 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135127 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135135 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker-log" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135147 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135155 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" 
containerName="container-server" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135167 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135176 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-api" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135189 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" containerName="nova-cell0-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135196 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" containerName="nova-cell0-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: E0930 19:54:16.135210 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135217 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135374 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135388 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9ab0288-ae1d-41f7-954d-8cbadc83c614" containerName="nova-cell0-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135417 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135428 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135437 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="05ea7072-b87a-4cf4-8f86-f32e5836951a" containerName="galera" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135452 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-reaper" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135464 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135477 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad8997f-b736-41c8-b087-d09b6316065d" containerName="dnsmasq-dns" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135491 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="sg-core" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135501 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135528 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 
19:54:16.135546 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3b1ef6d-6617-4d15-8709-a7623e75faa4" containerName="rabbitmq" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135559 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135570 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d8f3245-22a4-45a4-bbc4-33e1a078f92b" containerName="memcached" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135579 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-notification-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135593 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9996bc15-3d12-4aa1-a22e-d11726f6c4d8" containerName="ovn-controller" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135605 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135622 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135633 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135649 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dff94a7-1c27-4324-8e94-5a095f79eb9a" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135663 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135674 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="swift-recon-cron" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135685 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd344ad-8ef7-4a5c-8b74-1af845398b3b" containerName="cinder-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135697 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135709 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="ovsdbserver-sb" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135717 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="probe" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135748 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovs-vswitchd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135758 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135769 4756 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-metadata" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135778 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bacc7b1-00f4-4489-9c0b-fa038cc443d6" containerName="rabbitmq" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135788 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0befea8f-1ced-4703-b2a2-a10e154271f4" containerName="glance-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135797 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-expirer" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135812 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="rsync" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135823 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135833 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="87e13633-3439-4184-a1f8-1afbbe54883a" containerName="ovsdbserver-nb" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135847 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135859 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135868 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-updater" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135880 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="28dfe26f-4d95-4def-9500-a0410c260cf2" containerName="placement-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135893 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfe78e50-0ef5-42f5-b989-39beebdd0d7c" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135904 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c19f17-3c18-484d-acfd-99c72926bdef" containerName="ovn-northd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135917 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-updater" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135931 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135941 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="object-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135950 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0d0499-c40c-474b-83d5-f72ac55d859c" containerName="keystone-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135958 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff112d3-3898-41c9-aab6-ed2e2a57a484" containerName="nova-scheduler-scheduler" Sep 30 19:54:16 crc 
kubenswrapper[4756]: I0930 19:54:16.135967 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a1a5fbf-06be-4bbe-a7ab-9a3becd5f84d" containerName="proxy-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135974 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e92dcf48-58c2-4c3e-907f-7fd43e23a424" containerName="kube-state-metrics" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135982 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3956c8b-1699-4cc4-8a97-b3ed35730374" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.135995 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e9466ec-a908-431a-9c47-fda720095cd8" containerName="openstack-network-exporter" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136005 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce0daf51-f01f-4ace-bf9d-3dd2e2023618" containerName="nova-metadata-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136014 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="164b639f-c027-4486-a298-06e9510e14ac" containerName="nova-cell1-conductor-conductor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136024 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0511d760-5c65-453b-aefd-89ebb9a52ac6" containerName="barbican-keystone-listener-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136036 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5c32f2-40b7-4c8f-ab7b-58b6fa3200ed" containerName="ovsdb-server" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136047 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="207e631b-3ef5-430f-bad1-4a9972dc8eb4" containerName="glance-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136059 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136069 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c14829a-8f5d-490d-a922-719d81da47a4" containerName="barbican-worker" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136079 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="feac82fa-065f-4e9a-b649-fa26b2d150b8" containerName="galera" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136092 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fbaa08d-11b7-4498-aeb2-e82ac13f64d0" containerName="barbican-api-log" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136102 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a44baa-047c-44d4-958e-c0d219d4f5a6" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136113 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="ceilometer-central-agent" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136121 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="859e275e-1d03-482b-aff6-8df9d80957fe" containerName="mariadb-account-delete" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136130 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="container-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136139 
4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dead4ff-0706-4a5e-b6ae-c388ae1c2313" containerName="neutron-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136151 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-replicator" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136163 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be10f5-be18-4ab5-9f8e-0d6f2a8dcb4f" containerName="account-auditor" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136173 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" containerName="cinder-scheduler" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136184 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="78518120-da64-4b55-9dce-b7412b16bed3" containerName="proxy-httpd" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.136193 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="eed122c3-f6af-488a-9684-f60ffe104434" containerName="nova-api-api" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.137517 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.154694 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"] Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.301513 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndlgg\" (UniqueName: \"kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.301582 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.301686 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.402710 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndlgg\" (UniqueName: \"kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.402781 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " 
pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.402839 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.403264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.403335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.422154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndlgg\" (UniqueName: \"kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg\") pod \"redhat-marketplace-j8ff2\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.455876 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.630347 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.654885 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.667748 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 19:54:16 crc kubenswrapper[4756]: I0930 19:54:16.884317 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"] Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.134515 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f3ed405-3d2e-4296-8ea5-7b426f2bd987" path="/var/lib/kubelet/pods/6f3ed405-3d2e-4296-8ea5-7b426f2bd987/volumes" Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.135962 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cba2998-4911-447e-8fa8-782a83f0b3be" path="/var/lib/kubelet/pods/7cba2998-4911-447e-8fa8-782a83f0b3be/volumes" Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.643348 4756 generic.go:334] "Generic (PLEG): container finished" podID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerID="92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf" exitCode=0 Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.643490 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerDied","Data":"92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf"} Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.643760 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerStarted","Data":"9e01f4a6ff85cc7ae48b9de5c69b07f84e22bbd890d829522d781a1dc7f74fb2"} Sep 30 19:54:17 crc kubenswrapper[4756]: I0930 19:54:17.646389 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 19:54:19 crc kubenswrapper[4756]: I0930 19:54:19.687337 4756 generic.go:334] "Generic (PLEG): container finished" podID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerID="5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621" exitCode=0 Sep 30 19:54:19 crc kubenswrapper[4756]: I0930 19:54:19.687468 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerDied","Data":"5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621"} Sep 30 19:54:20 crc kubenswrapper[4756]: I0930 19:54:20.703513 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerStarted","Data":"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0"} Sep 30 19:54:20 crc kubenswrapper[4756]: I0930 19:54:20.742055 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j8ff2" podStartSLOduration=2.287022381 podStartE2EDuration="4.742021136s" podCreationTimestamp="2025-09-30 19:54:16 +0000 UTC" firstStartedPulling="2025-09-30 19:54:17.64585398 +0000 UTC m=+1387.266787497" lastFinishedPulling="2025-09-30 19:54:20.100852765 +0000 UTC m=+1389.721786252" observedRunningTime="2025-09-30 19:54:20.732052046 +0000 UTC m=+1390.352985563" 
watchObservedRunningTime="2025-09-30 19:54:20.742021136 +0000 UTC m=+1390.362954663" Sep 30 19:54:26 crc kubenswrapper[4756]: I0930 19:54:26.456445 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:26 crc kubenswrapper[4756]: I0930 19:54:26.456970 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:26 crc kubenswrapper[4756]: I0930 19:54:26.532066 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:26 crc kubenswrapper[4756]: I0930 19:54:26.827736 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:26 crc kubenswrapper[4756]: I0930 19:54:26.877960 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"] Sep 30 19:54:28 crc kubenswrapper[4756]: I0930 19:54:28.790774 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j8ff2" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="registry-server" containerID="cri-o://b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0" gracePeriod=2 Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.268279 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.453884 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities\") pod \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.453939 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content\") pod \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.454040 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndlgg\" (UniqueName: \"kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg\") pod \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\" (UID: \"e7fd1b45-8dfa-488e-a4f4-116fbce0f314\") " Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.455194 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities" (OuterVolumeSpecName: "utilities") pod "e7fd1b45-8dfa-488e-a4f4-116fbce0f314" (UID: "e7fd1b45-8dfa-488e-a4f4-116fbce0f314"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.459213 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg" (OuterVolumeSpecName: "kube-api-access-ndlgg") pod "e7fd1b45-8dfa-488e-a4f4-116fbce0f314" (UID: "e7fd1b45-8dfa-488e-a4f4-116fbce0f314"). InnerVolumeSpecName "kube-api-access-ndlgg". 
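The pod_startup_latency_tracker entry above (19:54:20.742) can be checked by hand: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, 19:54:20.742021136 minus 19:54:16 = 4.742021136s, and podStartSLOduration excludes the image-pull window measured on the monotonic clock (the m=+ offsets): 1389.721786252 minus 1387.266787497 = 2.454998755s, so 4.742021136 minus 2.454998755 = 2.287022381s, exactly the value logged. A few lines of Go reproduce the arithmetic; the constant names are ours, not the tracker's, and the values are copied from the log.

package main

import "fmt"

// Reproduces the pod_startup_latency_tracker arithmetic from the entry above.
func main() {
	const (
		e2e              = 4.742021136    // observedRunningTime - podCreationTimestamp, seconds
		firstStartedPull = 1387.266787497 // monotonic m=+ offset at firstStartedPulling
		lastFinishedPull = 1389.721786252 // monotonic m=+ offset at lastFinishedPulling
	)
	pull := lastFinishedPull - firstStartedPull            // 2.454998755s spent pulling
	fmt.Printf("podStartSLOduration = %.9fs\n", e2e-pull) // 2.287022381s, as logged
}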
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.465910 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7fd1b45-8dfa-488e-a4f4-116fbce0f314" (UID: "e7fd1b45-8dfa-488e-a4f4-116fbce0f314"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.556145 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndlgg\" (UniqueName: \"kubernetes.io/projected/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-kube-api-access-ndlgg\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.556184 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.556198 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7fd1b45-8dfa-488e-a4f4-116fbce0f314-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.806917 4756 generic.go:334] "Generic (PLEG): container finished" podID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerID="b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0" exitCode=0 Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.806986 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j8ff2" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.807012 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerDied","Data":"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0"} Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.807079 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j8ff2" event={"ID":"e7fd1b45-8dfa-488e-a4f4-116fbce0f314","Type":"ContainerDied","Data":"9e01f4a6ff85cc7ae48b9de5c69b07f84e22bbd890d829522d781a1dc7f74fb2"} Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.807114 4756 scope.go:117] "RemoveContainer" containerID="b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.855806 4756 scope.go:117] "RemoveContainer" containerID="5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.859083 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"] Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.865659 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j8ff2"] Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.883580 4756 scope.go:117] "RemoveContainer" containerID="92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.905758 4756 scope.go:117] "RemoveContainer" containerID="b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0" Sep 30 19:54:29 crc kubenswrapper[4756]: E0930 19:54:29.906262 4756 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0\": container with ID starting with b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0 not found: ID does not exist" containerID="b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.906334 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0"} err="failed to get container status \"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0\": rpc error: code = NotFound desc = could not find container \"b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0\": container with ID starting with b0f7d17b0531be473972c51d3a61a99a7b75a2b4fb789de7a105ffd3c0475cb0 not found: ID does not exist" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.906366 4756 scope.go:117] "RemoveContainer" containerID="5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621" Sep 30 19:54:29 crc kubenswrapper[4756]: E0930 19:54:29.906837 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621\": container with ID starting with 5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621 not found: ID does not exist" containerID="5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.906878 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621"} err="failed to get container status \"5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621\": rpc error: code = NotFound desc = could not find container \"5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621\": container with ID starting with 5bdb99772c352a2254e7a4da9f87038528f758b56cefd1524c00082403ceb621 not found: ID does not exist" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.906899 4756 scope.go:117] "RemoveContainer" containerID="92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf" Sep 30 19:54:29 crc kubenswrapper[4756]: E0930 19:54:29.907133 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf\": container with ID starting with 92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf not found: ID does not exist" containerID="92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf" Sep 30 19:54:29 crc kubenswrapper[4756]: I0930 19:54:29.907162 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf"} err="failed to get container status \"92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf\": rpc error: code = NotFound desc = could not find container \"92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf\": container with ID starting with 92fc805c84c53ca80bd45cad9960b618c92aa4e02b177e7019b6bb00520bf5cf not found: ID does not exist" Sep 30 19:54:31 crc kubenswrapper[4756]: I0930 19:54:31.131913 4756 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" path="/var/lib/kubelet/pods/e7fd1b45-8dfa-488e-a4f4-116fbce0f314/volumes" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.137254 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:02 crc kubenswrapper[4756]: E0930 19:55:02.138141 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="extract-utilities" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.138155 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="extract-utilities" Sep 30 19:55:02 crc kubenswrapper[4756]: E0930 19:55:02.138183 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="extract-content" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.138191 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="extract-content" Sep 30 19:55:02 crc kubenswrapper[4756]: E0930 19:55:02.138201 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="registry-server" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.138208 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="registry-server" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.138378 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7fd1b45-8dfa-488e-a4f4-116fbce0f314" containerName="registry-server" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.139699 4756 util.go:30] "No sandbox for pod can be found. 
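Each pod admission above (SyncLoop ADD) is preceded by paired cpu_manager.go:410 / state_mem.go:107 lines and a memory_manager.go:354 line: before admitting the new pod, the resource managers drop per-container assignments left behind by pods that no longer exist. The E-level entries are that cleanup being logged, not a failure of the new pod. A minimal sketch of the pattern follows; the key type, state map, and activePods set are hypothetical stand-ins for the kubelet's checkpointed CPU/memory manager state.

package main

import "fmt"

// key identifies a container's resource assignment by pod UID and name.
type key struct{ podUID, container string }

// removeStaleState drops assignments for containers whose pods are gone,
// mirroring "RemoveStaleState: removing container" followed by
// "Deleted CPUSet assignment".
func removeStaleState(state map[key]string, activePods map[string]bool) {
	for k := range state {
		if !activePods[k.podUID] {
			fmt.Printf("removing stale state podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(state, k)
		}
	}
}

func main() {
	state := map[key]string{
		{"e7fd1b45-8dfa-488e-a4f4-116fbce0f314", "registry-server"}: "0-3",
		{"live-pod", "live-container"}:                              "4-7",
	}
	removeStaleState(state, map[string]bool{"live-pod": true})
	fmt.Println("assignments left:", len(state)) // 1
}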
Need to start a new one" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.152569 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.276567 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.276669 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59t56\" (UniqueName: \"kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.276711 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.377774 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.377882 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.377943 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59t56\" (UniqueName: \"kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.378431 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.378517 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.403803 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-59t56\" (UniqueName: \"kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56\") pod \"redhat-operators-mp8tn\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.457834 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:02 crc kubenswrapper[4756]: I0930 19:55:02.722385 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:03 crc kubenswrapper[4756]: I0930 19:55:03.130891 4756 generic.go:334] "Generic (PLEG): container finished" podID="b290d95b-26f2-4347-a070-aad2c444f94b" containerID="56e7a47b3486468106ac8a4b157d9e50983733faf7516610ece04a7e7e08e152" exitCode=0 Sep 30 19:55:03 crc kubenswrapper[4756]: I0930 19:55:03.131010 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerDied","Data":"56e7a47b3486468106ac8a4b157d9e50983733faf7516610ece04a7e7e08e152"} Sep 30 19:55:03 crc kubenswrapper[4756]: I0930 19:55:03.131123 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerStarted","Data":"c1f9d5604e9fb7e4b583b9edd4487ca70830437e40cf0ac515460525deb93b8c"} Sep 30 19:55:04 crc kubenswrapper[4756]: E0930 19:55:04.832810 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb290d95b_26f2_4347_a070_aad2c444f94b.slice/crio-a70214892675ce1d2f9710ffd1133dd87628ef527a3be2b60bbd65980adbb782.scope\": RecentStats: unable to find data in memory cache]" Sep 30 19:55:05 crc kubenswrapper[4756]: I0930 19:55:05.154129 4756 generic.go:334] "Generic (PLEG): container finished" podID="b290d95b-26f2-4347-a070-aad2c444f94b" containerID="a70214892675ce1d2f9710ffd1133dd87628ef527a3be2b60bbd65980adbb782" exitCode=0 Sep 30 19:55:05 crc kubenswrapper[4756]: I0930 19:55:05.154180 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerDied","Data":"a70214892675ce1d2f9710ffd1133dd87628ef527a3be2b60bbd65980adbb782"} Sep 30 19:55:06 crc kubenswrapper[4756]: I0930 19:55:06.167074 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerStarted","Data":"c30c609784a70ff2b4c364de6a9d9cc4d1cb1c773be15e014610caefcf4f2e33"} Sep 30 19:55:06 crc kubenswrapper[4756]: I0930 19:55:06.185981 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mp8tn" podStartSLOduration=1.761877623 podStartE2EDuration="4.18596297s" podCreationTimestamp="2025-09-30 19:55:02 +0000 UTC" firstStartedPulling="2025-09-30 19:55:03.134219584 +0000 UTC m=+1432.755153061" lastFinishedPulling="2025-09-30 19:55:05.558304921 +0000 UTC m=+1435.179238408" observedRunningTime="2025-09-30 19:55:06.185835966 +0000 UTC m=+1435.806769493" watchObservedRunningTime="2025-09-30 19:55:06.18596297 +0000 UTC m=+1435.806896457" Sep 30 19:55:12 crc kubenswrapper[4756]: I0930 19:55:12.458185 4756 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:12 crc kubenswrapper[4756]: I0930 19:55:12.458588 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:12 crc kubenswrapper[4756]: I0930 19:55:12.511890 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:13 crc kubenswrapper[4756]: I0930 19:55:13.273912 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:13 crc kubenswrapper[4756]: I0930 19:55:13.328573 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.250855 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mp8tn" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="registry-server" containerID="cri-o://c30c609784a70ff2b4c364de6a9d9cc4d1cb1c773be15e014610caefcf4f2e33" gracePeriod=2 Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.522731 4756 scope.go:117] "RemoveContainer" containerID="366cf47beb3bf314873bbe2aee65a48d7de8c63bb8eb30ed71c2c3e6ba1b8b24" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.641655 4756 scope.go:117] "RemoveContainer" containerID="fb299811d5b1ee6b8b6e9633ad9ea2340ebae147f0ca313c0124b1d085c0b499" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.741389 4756 scope.go:117] "RemoveContainer" containerID="8072ad59b49efc7ab3149a130a357ae4c8ebf10c90e906b83a1c2c7c39a26605" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.799161 4756 scope.go:117] "RemoveContainer" containerID="ae0fbf9992f7682bada4b00715597be672fc0de41bb3330f05ac33f88569fad7" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.824543 4756 scope.go:117] "RemoveContainer" containerID="b9039dde4f47a71c71085a8872d287b5cca916163354551bfbc89d93d31530ff" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.854291 4756 scope.go:117] "RemoveContainer" containerID="f505cfd159ce82f4bb52b76159f6e70bf9d14da0427ec40a08f30173d7ae97d4" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.883140 4756 scope.go:117] "RemoveContainer" containerID="e64c4560859d931a12d9625b07d841a4db85b1016b5f67c49bd7fe3814abe86a" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.957172 4756 scope.go:117] "RemoveContainer" containerID="4743fe1d3afca0374e30323f82a3b45ead65b0e37c7b34d43281655e0227ef30" Sep 30 19:55:15 crc kubenswrapper[4756]: I0930 19:55:15.978787 4756 scope.go:117] "RemoveContainer" containerID="c7c419536d0be0324d543ae02bef0262d1e3a0cb35dd8af1179be449734d0c2e" Sep 30 19:55:16 crc kubenswrapper[4756]: I0930 19:55:16.265783 4756 generic.go:334] "Generic (PLEG): container finished" podID="b290d95b-26f2-4347-a070-aad2c444f94b" containerID="c30c609784a70ff2b4c364de6a9d9cc4d1cb1c773be15e014610caefcf4f2e33" exitCode=0 Sep 30 19:55:16 crc kubenswrapper[4756]: I0930 19:55:16.265844 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerDied","Data":"c30c609784a70ff2b4c364de6a9d9cc4d1cb1c773be15e014610caefcf4f2e33"} Sep 30 19:55:16 crc kubenswrapper[4756]: I0930 19:55:16.863297 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.017144 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities\") pod \"b290d95b-26f2-4347-a070-aad2c444f94b\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.017469 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59t56\" (UniqueName: \"kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56\") pod \"b290d95b-26f2-4347-a070-aad2c444f94b\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.017535 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content\") pod \"b290d95b-26f2-4347-a070-aad2c444f94b\" (UID: \"b290d95b-26f2-4347-a070-aad2c444f94b\") " Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.019209 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities" (OuterVolumeSpecName: "utilities") pod "b290d95b-26f2-4347-a070-aad2c444f94b" (UID: "b290d95b-26f2-4347-a070-aad2c444f94b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.026227 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56" (OuterVolumeSpecName: "kube-api-access-59t56") pod "b290d95b-26f2-4347-a070-aad2c444f94b" (UID: "b290d95b-26f2-4347-a070-aad2c444f94b"). InnerVolumeSpecName "kube-api-access-59t56". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.108799 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b290d95b-26f2-4347-a070-aad2c444f94b" (UID: "b290d95b-26f2-4347-a070-aad2c444f94b"). InnerVolumeSpecName "catalog-content". 
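The "Killing container with a grace period" entries (19:54:28 and 19:55:15 above, 19:55:53 below) follow the usual stop-wait-escalate pattern: ask the runtime to stop the container, wait up to gracePeriod for it to exit, and only then force-kill. Here is a minimal sketch under that assumption; the Runtime interface and fakeRT are hypothetical, and the real kubelet drives this through the CRI (cri-o in this log).

package main

import (
	"context"
	"fmt"
	"time"
)

// Runtime is a hypothetical stand-in for the runtime calls involved.
type Runtime interface {
	Stop(id string) error                      // polite stop (SIGTERM)
	Kill(id string) error                      // forced kill (SIGKILL)
	Wait(ctx context.Context, id string) error // block until exit or ctx done
}

// killWithGracePeriod sketches "Killing container with a grace period":
// stop, wait up to grace for the container to exit, then escalate.
func killWithGracePeriod(rt Runtime, id string, grace time.Duration) error {
	fmt.Printf("killing %s gracePeriod=%v\n", id, grace)
	if err := rt.Stop(id); err != nil {
		return err
	}
	ctx, cancel := context.WithTimeout(context.Background(), grace)
	defer cancel()
	if err := rt.Wait(ctx, id); err != nil {
		return rt.Kill(id) // grace period expired: force-kill
	}
	return nil
}

// fakeRT exits as soon as it is stopped, so the grace period never expires.
type fakeRT struct{ exited chan struct{} }

func (f fakeRT) Stop(string) error { close(f.exited); return nil }
func (f fakeRT) Kill(string) error { return nil }
func (f fakeRT) Wait(ctx context.Context, _ string) error {
	select {
	case <-f.exited:
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func main() {
	rt := fakeRT{exited: make(chan struct{})}
	_ = killWithGracePeriod(rt, "registry-server", 2*time.Second)
}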
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.119689 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59t56\" (UniqueName: \"kubernetes.io/projected/b290d95b-26f2-4347-a070-aad2c444f94b-kube-api-access-59t56\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.119720 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.119733 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b290d95b-26f2-4347-a070-aad2c444f94b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.278297 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mp8tn" event={"ID":"b290d95b-26f2-4347-a070-aad2c444f94b","Type":"ContainerDied","Data":"c1f9d5604e9fb7e4b583b9edd4487ca70830437e40cf0ac515460525deb93b8c"} Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.278594 4756 scope.go:117] "RemoveContainer" containerID="c30c609784a70ff2b4c364de6a9d9cc4d1cb1c773be15e014610caefcf4f2e33" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.278387 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mp8tn" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.305129 4756 scope.go:117] "RemoveContainer" containerID="a70214892675ce1d2f9710ffd1133dd87628ef527a3be2b60bbd65980adbb782" Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.312805 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.319676 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mp8tn"] Sep 30 19:55:17 crc kubenswrapper[4756]: I0930 19:55:17.336716 4756 scope.go:117] "RemoveContainer" containerID="56e7a47b3486468106ac8a4b157d9e50983733faf7516610ece04a7e7e08e152" Sep 30 19:55:19 crc kubenswrapper[4756]: I0930 19:55:19.134520 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" path="/var/lib/kubelet/pods/b290d95b-26f2-4347-a070-aad2c444f94b/volumes" Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.921853 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qmpgr"] Sep 30 19:55:40 crc kubenswrapper[4756]: E0930 19:55:40.922760 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="extract-content" Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.922772 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="extract-content" Sep 30 19:55:40 crc kubenswrapper[4756]: E0930 19:55:40.922789 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="extract-utilities" Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.922796 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="extract-utilities" Sep 30 19:55:40 crc kubenswrapper[4756]: E0930 19:55:40.922822 4756 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="registry-server"
Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.922829 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="registry-server"
Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.922963 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b290d95b-26f2-4347-a070-aad2c444f94b" containerName="registry-server"
Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.924062 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:40 crc kubenswrapper[4756]: I0930 19:55:40.940750 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qmpgr"]
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.070756 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.070884 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.070928 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drfdb\" (UniqueName: \"kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.173426 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drfdb\" (UniqueName: \"kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.173523 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.173594 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.174068 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.174146 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.201786 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drfdb\" (UniqueName: \"kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb\") pod \"community-operators-qmpgr\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") " pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.261320 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:41 crc kubenswrapper[4756]: I0930 19:55:41.814787 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qmpgr"]
Sep 30 19:55:42 crc kubenswrapper[4756]: I0930 19:55:42.541665 4756 generic.go:334] "Generic (PLEG): container finished" podID="c5d892ce-0288-478e-a017-e89146d3599c" containerID="387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269" exitCode=0
Sep 30 19:55:42 crc kubenswrapper[4756]: I0930 19:55:42.541732 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerDied","Data":"387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269"}
Sep 30 19:55:42 crc kubenswrapper[4756]: I0930 19:55:42.541965 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerStarted","Data":"ed68177852aadc4829f7ea77526088d50c57cc9cfe1d8ad42662a7cbce2a823a"}
Sep 30 19:55:43 crc kubenswrapper[4756]: I0930 19:55:43.552878 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerStarted","Data":"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"}
Sep 30 19:55:44 crc kubenswrapper[4756]: I0930 19:55:44.567643 4756 generic.go:334] "Generic (PLEG): container finished" podID="c5d892ce-0288-478e-a017-e89146d3599c" containerID="4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d" exitCode=0
Sep 30 19:55:44 crc kubenswrapper[4756]: I0930 19:55:44.567761 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerDied","Data":"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"}
Sep 30 19:55:45 crc kubenswrapper[4756]: I0930 19:55:45.578141 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerStarted","Data":"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"}
Sep 30 19:55:45 crc kubenswrapper[4756]: I0930 19:55:45.605971 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qmpgr" podStartSLOduration=2.800342895 podStartE2EDuration="5.605948899s" podCreationTimestamp="2025-09-30 19:55:40 +0000 UTC" firstStartedPulling="2025-09-30 19:55:42.544222953 +0000 UTC m=+1472.165156470" lastFinishedPulling="2025-09-30 19:55:45.349828977 +0000 UTC m=+1474.970762474" observedRunningTime="2025-09-30 19:55:45.600944388 +0000 UTC m=+1475.221877895" watchObservedRunningTime="2025-09-30 19:55:45.605948899 +0000 UTC m=+1475.226882396"
Sep 30 19:55:51 crc kubenswrapper[4756]: I0930 19:55:51.262293 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:51 crc kubenswrapper[4756]: I0930 19:55:51.263161 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:51 crc kubenswrapper[4756]: I0930 19:55:51.339073 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:51 crc kubenswrapper[4756]: I0930 19:55:51.689322 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:51 crc kubenswrapper[4756]: I0930 19:55:51.746838 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qmpgr"]
Sep 30 19:55:53 crc kubenswrapper[4756]: I0930 19:55:53.686739 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qmpgr" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="registry-server" containerID="cri-o://52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802" gracePeriod=2
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.166193 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.290999 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities\") pod \"c5d892ce-0288-478e-a017-e89146d3599c\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") "
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.291354 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drfdb\" (UniqueName: \"kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb\") pod \"c5d892ce-0288-478e-a017-e89146d3599c\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") "
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.291478 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content\") pod \"c5d892ce-0288-478e-a017-e89146d3599c\" (UID: \"c5d892ce-0288-478e-a017-e89146d3599c\") "
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.292638 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities" (OuterVolumeSpecName: "utilities") pod "c5d892ce-0288-478e-a017-e89146d3599c" (UID: "c5d892ce-0288-478e-a017-e89146d3599c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.301529 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb" (OuterVolumeSpecName: "kube-api-access-drfdb") pod "c5d892ce-0288-478e-a017-e89146d3599c" (UID: "c5d892ce-0288-478e-a017-e89146d3599c"). InnerVolumeSpecName "kube-api-access-drfdb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.394027 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.394103 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drfdb\" (UniqueName: \"kubernetes.io/projected/c5d892ce-0288-478e-a017-e89146d3599c-kube-api-access-drfdb\") on node \"crc\" DevicePath \"\""
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.700207 4756 generic.go:334] "Generic (PLEG): container finished" podID="c5d892ce-0288-478e-a017-e89146d3599c" containerID="52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802" exitCode=0
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.700247 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmpgr"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.700271 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerDied","Data":"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"}
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.700911 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmpgr" event={"ID":"c5d892ce-0288-478e-a017-e89146d3599c","Type":"ContainerDied","Data":"ed68177852aadc4829f7ea77526088d50c57cc9cfe1d8ad42662a7cbce2a823a"}
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.700969 4756 scope.go:117] "RemoveContainer" containerID="52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.731364 4756 scope.go:117] "RemoveContainer" containerID="4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.763102 4756 scope.go:117] "RemoveContainer" containerID="387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.788352 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5d892ce-0288-478e-a017-e89146d3599c" (UID: "c5d892ce-0288-478e-a017-e89146d3599c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.789570 4756 scope.go:117] "RemoveContainer" containerID="52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"
Sep 30 19:55:54 crc kubenswrapper[4756]: E0930 19:55:54.790512 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802\": container with ID starting with 52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802 not found: ID does not exist" containerID="52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.790581 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802"} err="failed to get container status \"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802\": rpc error: code = NotFound desc = could not find container \"52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802\": container with ID starting with 52ca7fdded3a31d2cfe58dbc579c9ddf94b2d01000d2dbc7b4393135dc57d802 not found: ID does not exist"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.790625 4756 scope.go:117] "RemoveContainer" containerID="4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"
Sep 30 19:55:54 crc kubenswrapper[4756]: E0930 19:55:54.791142 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d\": container with ID starting with 4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d not found: ID does not exist" containerID="4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.791204 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d"} err="failed to get container status \"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d\": rpc error: code = NotFound desc = could not find container \"4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d\": container with ID starting with 4494f5fadc797475ab0b6747a7f6c336e0d6606b3a90dc89debbd4169375011d not found: ID does not exist"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.791278 4756 scope.go:117] "RemoveContainer" containerID="387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269"
Sep 30 19:55:54 crc kubenswrapper[4756]: E0930 19:55:54.791680 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269\": container with ID starting with 387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269 not found: ID does not exist" containerID="387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.791852 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269"} err="failed to get container status \"387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269\": rpc error: code = NotFound desc = could not find container \"387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269\": container with ID starting with 387c08b406983b81036fb1b4a4b49282cbfe8dc67e12b2c7460f2975276fd269 not found: ID does not exist"
Sep 30 19:55:54 crc kubenswrapper[4756]: I0930 19:55:54.799758 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5d892ce-0288-478e-a017-e89146d3599c-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 19:55:55 crc kubenswrapper[4756]: I0930 19:55:55.054984 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qmpgr"]
Sep 30 19:55:55 crc kubenswrapper[4756]: I0930 19:55:55.064201 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qmpgr"]
Sep 30 19:55:55 crc kubenswrapper[4756]: I0930 19:55:55.136272 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5d892ce-0288-478e-a017-e89146d3599c" path="/var/lib/kubelet/pods/c5d892ce-0288-478e-a017-e89146d3599c/volumes"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.989676 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:06 crc kubenswrapper[4756]: E0930 19:56:06.990887 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="registry-server"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.990910 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="registry-server"
Sep 30 19:56:06 crc kubenswrapper[4756]: E0930 19:56:06.990940 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="extract-content"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.990953 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="extract-content"
Sep 30 19:56:06 crc kubenswrapper[4756]: E0930 19:56:06.991001 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="extract-utilities"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.991015 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="extract-utilities"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.992441 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5d892ce-0288-478e-a017-e89146d3599c" containerName="registry-server"
Sep 30 19:56:06 crc kubenswrapper[4756]: I0930 19:56:06.994982 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.003923 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.076417 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpsx4\" (UniqueName: \"kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.076574 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.076697 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.177631 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.177707 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpsx4\" (UniqueName: \"kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.177768 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.178226 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.178626 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.201081 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpsx4\" (UniqueName: \"kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4\") pod \"certified-operators-mv94h\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") " pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.333661 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.582112 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.829226 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerID="3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f" exitCode=0
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.829297 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerDied","Data":"3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f"}
Sep 30 19:56:07 crc kubenswrapper[4756]: I0930 19:56:07.829631 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerStarted","Data":"b3ceb3c4821dab9e47dd9071f6bcae7b906bdca55bf40038306dd65245bf0d69"}
Sep 30 19:56:08 crc kubenswrapper[4756]: I0930 19:56:08.841386 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerStarted","Data":"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"}
Sep 30 19:56:09 crc kubenswrapper[4756]: I0930 19:56:09.854312 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerID="0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d" exitCode=0
Sep 30 19:56:09 crc kubenswrapper[4756]: I0930 19:56:09.854355 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerDied","Data":"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"}
Sep 30 19:56:10 crc kubenswrapper[4756]: I0930 19:56:10.866525 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerStarted","Data":"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"}
Sep 30 19:56:11 crc kubenswrapper[4756]: I0930 19:56:11.538873 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:56:11 crc kubenswrapper[4756]: I0930 19:56:11.539259 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.182649 4756 scope.go:117] "RemoveContainer" containerID="ca33aa0424ff5fc1f6488fdb5c4b14e6351ce319000039e3be72f4a12493d153"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.230936 4756 scope.go:117] "RemoveContainer" containerID="4adae0e79776f455907198b99b57c0679e094944b65745853490c007c0b51563"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.282631 4756 scope.go:117] "RemoveContainer" containerID="9df0555f3c8e9c61006320811dc3865ff17aeb0ac002679468f323d52fd9ee05"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.309005 4756 scope.go:117] "RemoveContainer" containerID="2e64c63e69ea630f61c08c4bde77afdbb2cc31157f4f3a8f15cbff5d6d60f2ed"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.358834 4756 scope.go:117] "RemoveContainer" containerID="0c0236998df6869414ba3511af432b72b556619301b8a1f321a61e81a804042b"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.381901 4756 scope.go:117] "RemoveContainer" containerID="2efc7a3b56a345b58615fb38229a5dd9d08dd316e4746b7d2a81edbe3083c535"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.415235 4756 scope.go:117] "RemoveContainer" containerID="b36ede0872ef31860c685d708b8f806dc565231aea3bbc12a13fd67ea25ef4a3"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.454288 4756 scope.go:117] "RemoveContainer" containerID="a126280212e36fcf459f93852c7632718b1fd8e6ce5b19d55fd989d6bd0a35f0"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.478929 4756 scope.go:117] "RemoveContainer" containerID="88a68782dbc943aac7628e21b900c0973de74c4cdd5afe3d6e4270e57ae343af"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.518830 4756 scope.go:117] "RemoveContainer" containerID="001b6cc738cccabf6dc34eb0d26d436e3bdcb9eaae466fb6ce0c2a4a698820c7"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.553964 4756 scope.go:117] "RemoveContainer" containerID="ce70f4912d6a6d02a8b2a05e892b202161a2779d8b715e8a4d5f36c42277498f"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.588775 4756 scope.go:117] "RemoveContainer" containerID="a678febbf786620062061e9e22724a0f5433ffe878f66576ce828fa62823fb1a"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.616057 4756 scope.go:117] "RemoveContainer" containerID="33c3ea5f36bb441ebdd67b050b98c2ab9aa572ab6e8284a59c2e24fc642c85b0"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.644521 4756 scope.go:117] "RemoveContainer" containerID="edd2cc3b5058fc58938fed149c5e32347e36c9efc0f993f06ade0d7f6a0128e4"
Sep 30 19:56:16 crc kubenswrapper[4756]: I0930 19:56:16.703620 4756 scope.go:117] "RemoveContainer" containerID="2da5c1fb9beadd9318440b00bca43cb3548b37d0f21ce36f2e1f3ff26df6dda1"
Sep 30 19:56:17 crc kubenswrapper[4756]: I0930 19:56:17.334469 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:17 crc kubenswrapper[4756]: I0930 19:56:17.334736 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:17 crc kubenswrapper[4756]: I0930 19:56:17.414837 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:17 crc kubenswrapper[4756]: I0930 19:56:17.448598 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mv94h" podStartSLOduration=8.86738033 podStartE2EDuration="11.448569127s" podCreationTimestamp="2025-09-30 19:56:06 +0000 UTC" firstStartedPulling="2025-09-30 19:56:07.830995878 +0000 UTC m=+1497.451929355" lastFinishedPulling="2025-09-30 19:56:10.412184655 +0000 UTC m=+1500.033118152" observedRunningTime="2025-09-30 19:56:10.894369969 +0000 UTC m=+1500.515303466" watchObservedRunningTime="2025-09-30 19:56:17.448569127 +0000 UTC m=+1507.069502644"
Sep 30 19:56:17 crc kubenswrapper[4756]: I0930 19:56:17.999531 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:18 crc kubenswrapper[4756]: I0930 19:56:18.056187 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:19 crc kubenswrapper[4756]: I0930 19:56:19.956505 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mv94h" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="registry-server" containerID="cri-o://5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb" gracePeriod=2
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.376802 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.484193 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content\") pod \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") "
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.484262 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities\") pod \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") "
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.484341 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpsx4\" (UniqueName: \"kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4\") pod \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\" (UID: \"b2c4f046-797f-42cf-aaa7-b253c9c811e4\") "
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.485980 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities" (OuterVolumeSpecName: "utilities") pod "b2c4f046-797f-42cf-aaa7-b253c9c811e4" (UID: "b2c4f046-797f-42cf-aaa7-b253c9c811e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.489916 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4" (OuterVolumeSpecName: "kube-api-access-fpsx4") pod "b2c4f046-797f-42cf-aaa7-b253c9c811e4" (UID: "b2c4f046-797f-42cf-aaa7-b253c9c811e4"). InnerVolumeSpecName "kube-api-access-fpsx4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.542637 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2c4f046-797f-42cf-aaa7-b253c9c811e4" (UID: "b2c4f046-797f-42cf-aaa7-b253c9c811e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.585804 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.585843 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c4f046-797f-42cf-aaa7-b253c9c811e4-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.585856 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpsx4\" (UniqueName: \"kubernetes.io/projected/b2c4f046-797f-42cf-aaa7-b253c9c811e4-kube-api-access-fpsx4\") on node \"crc\" DevicePath \"\""
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.966376 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerID="5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb" exitCode=0
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.966436 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerDied","Data":"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"}
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.966460 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv94h" event={"ID":"b2c4f046-797f-42cf-aaa7-b253c9c811e4","Type":"ContainerDied","Data":"b3ceb3c4821dab9e47dd9071f6bcae7b906bdca55bf40038306dd65245bf0d69"}
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.966477 4756 scope.go:117] "RemoveContainer" containerID="5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"
Sep 30 19:56:20 crc kubenswrapper[4756]: I0930 19:56:20.966570 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv94h"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.000536 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.004490 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mv94h"]
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.005355 4756 scope.go:117] "RemoveContainer" containerID="0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.040986 4756 scope.go:117] "RemoveContainer" containerID="3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.065055 4756 scope.go:117] "RemoveContainer" containerID="5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"
Sep 30 19:56:21 crc kubenswrapper[4756]: E0930 19:56:21.065615 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb\": container with ID starting with 5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb not found: ID does not exist" containerID="5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.065643 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb"} err="failed to get container status \"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb\": rpc error: code = NotFound desc = could not find container \"5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb\": container with ID starting with 5fddeb97874c47e41ce037b792ccb06553083ee45cd55e91a01c86c3c41ee8eb not found: ID does not exist"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.065664 4756 scope.go:117] "RemoveContainer" containerID="0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"
Sep 30 19:56:21 crc kubenswrapper[4756]: E0930 19:56:21.066076 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d\": container with ID starting with 0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d not found: ID does not exist" containerID="0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.066112 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d"} err="failed to get container status \"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d\": rpc error: code = NotFound desc = could not find container \"0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d\": container with ID starting with 0d3f0ac78ea795dc6dc071bd557b47e3c3876d0a5b39ab41fde5b69f6103ce4d not found: ID does not exist"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.066139 4756 scope.go:117] "RemoveContainer" containerID="3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f"
Sep 30 19:56:21 crc kubenswrapper[4756]: E0930 19:56:21.066720 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f\": container with ID starting with 3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f not found: ID does not exist" containerID="3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.066865 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f"} err="failed to get container status \"3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f\": rpc error: code = NotFound desc = could not find container \"3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f\": container with ID starting with 3507edcaf6defcb37af4bfd9aac0d94cfe7569223d964a986997ede50214632f not found: ID does not exist"
Sep 30 19:56:21 crc kubenswrapper[4756]: I0930 19:56:21.126592 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" path="/var/lib/kubelet/pods/b2c4f046-797f-42cf-aaa7-b253c9c811e4/volumes"
Sep 30 19:56:41 crc kubenswrapper[4756]: I0930 19:56:41.539286 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:56:41 crc kubenswrapper[4756]: I0930 19:56:41.540265 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:57:11 crc kubenswrapper[4756]: I0930 19:57:11.538391 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:57:11 crc kubenswrapper[4756]: I0930 19:57:11.539196 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:57:11 crc kubenswrapper[4756]: I0930 19:57:11.539434 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj"
Sep 30 19:57:11 crc kubenswrapper[4756]: I0930 19:57:11.540236 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 19:57:11 crc kubenswrapper[4756]: I0930 19:57:11.540332 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" gracePeriod=600
Sep 30 19:57:11 crc kubenswrapper[4756]: E0930 19:57:11.677164 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:57:12 crc kubenswrapper[4756]: I0930 19:57:12.456883 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" exitCode=0
Sep 30 19:57:12 crc kubenswrapper[4756]: I0930 19:57:12.456957 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"}
Sep 30 19:57:12 crc kubenswrapper[4756]: I0930 19:57:12.457069 4756 scope.go:117] "RemoveContainer" containerID="3226587ad1762e9a93b7134218b589749878b020e4cd2179741113741ca24dea"
Sep 30 19:57:12 crc kubenswrapper[4756]: I0930 19:57:12.457798 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:57:12 crc kubenswrapper[4756]: E0930 19:57:12.458282 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.043224 4756 scope.go:117] "RemoveContainer" containerID="910743b27955784ee4da86c480d093283c52757852937aeb938dde4695e57b83"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.068724 4756 scope.go:117] "RemoveContainer" containerID="8d013d9f6a59da2bcacdb9355cb603a5a104cc57783d02a5fc8770f2294bb8f0"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.092550 4756 scope.go:117] "RemoveContainer" containerID="2ab71e28b3fab8dbf1f220c5eb4490f4cf686ee22cd811d509ce95839b4580b6"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.141646 4756 scope.go:117] "RemoveContainer" containerID="d359624da58f1703e3e0aaf51f2b403fbc047680e4375c39c8c5cf26280ba6fb"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.169738 4756 scope.go:117] "RemoveContainer" containerID="138f52c6994ac2758b4cb61b9a1de7c4625cdf55dd17f5766e35643527e4dcaa"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.245100 4756 scope.go:117] "RemoveContainer" containerID="34a9a3332fdf0e5c76c740afbc2528fa100673eb4984317f115c8a7e501cf8ba"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.269702 4756 scope.go:117] "RemoveContainer" containerID="af672176e9d6745734bae103367eaab92c17b4c3f23ec9cc9c702908a2e393cc"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.290760 4756 scope.go:117] "RemoveContainer" containerID="a9fb663db742268ff6709a83fa05391902d2464b18f50b8142ccb0124eab0605"
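Editor's note on the records above: the patch_prober.go/prober.go entries at 19:56:11, 19:56:41 and 19:57:11 are kubelet's liveness-probe flow for machine-config-daemon-4n9zj. An HTTP GET to http://127.0.0.1:8798/health is refused, the probe is marked failed, and once the failure threshold is crossed kuberuntime_container.go kills the container under its termination grace period (the gracePeriod=600 in the kill record comes from the pod spec, not the probe). A minimal Go sketch of the kind of probe that produces this behaviour, built with k8s.io/api types; the period and failure threshold below are assumptions inferred from the roughly 30-second spacing of the probe records, not values read from this pod:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Hypothetical liveness probe matching the endpoint seen in the log:
	// GET http://127.0.0.1:8798/health on the machine-config-daemon container.
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{ // field is named Handler in client-go < v1.23
			HTTPGet: &corev1.HTTPGetAction{
				Host:   "127.0.0.1",
				Path:   "/health",
				Port:   intstr.FromInt(8798),
				Scheme: corev1.URISchemeHTTP,
			},
		},
		PeriodSeconds:    30, // assumed from the ~30s gap between probe records
		FailureThreshold: 3,  // assumed; the Kubernetes default
	}
	fmt.Printf("%+v\n", probe)
}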
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.315148 4756 scope.go:117] "RemoveContainer" containerID="50da31e7ac0b473cd4688ff664a4f0390cc7fa80d4d402e57bb5dbbd273e547e"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.336647 4756 scope.go:117] "RemoveContainer" containerID="863f612cd545ac0fb2069bdbe890b8c349b3ace0bee3682e85b0690dbbe53426"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.364643 4756 scope.go:117] "RemoveContainer" containerID="5958179af95498993e03abf8ecb860e02ff9989d57c79429653db9897c8d3b02"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.382587 4756 scope.go:117] "RemoveContainer" containerID="1d7ee6a247afc0df8d24969be2e9021b5b7950050932c2f4b2dc96b9234b945e"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.405973 4756 scope.go:117] "RemoveContainer" containerID="e4a6232e87ef083723c07a59c58f19271017670018abbfa1105600aabf771695"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.425480 4756 scope.go:117] "RemoveContainer" containerID="0da4b3b06e25cc58e04e2a2d12d8d0b9964704dda90da088c73458a4f101cac4"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.461777 4756 scope.go:117] "RemoveContainer" containerID="88bc27040e35d9df953d55c08c8b378a2f16d65a8a8f0afd059a1be9ec84599d"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.509654 4756 scope.go:117] "RemoveContainer" containerID="ea06fef84d4e35b2e9eb548c83e401e03a330d1d6c02e93bb182740cf50abbf7"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.527912 4756 scope.go:117] "RemoveContainer" containerID="188884cc097d3da46aaa74ebafbd550125a5f4b8a7035fcec7d11bd3deca4c31"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.547234 4756 scope.go:117] "RemoveContainer" containerID="184b0bb964b5d979736c25d5b822cdec9e2f47d28b7b5da07359b35546a67d3b"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.587646 4756 scope.go:117] "RemoveContainer" containerID="6ddd09ff48db0699e5502527f64bdf95fb3651a12c72edb43b7654c5869384ba"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.612158 4756 scope.go:117] "RemoveContainer" containerID="a88c7d451b77d6db9b1544138620e9cae6b1e1fa6c6f9218fa77d075c959d40e"
Sep 30 19:57:17 crc kubenswrapper[4756]: I0930 19:57:17.627497 4756 scope.go:117] "RemoveContainer" containerID="a96ca6143ea765730fa414e7b169828518681d4fa546277df96764fd20203c32"
Sep 30 19:57:24 crc kubenswrapper[4756]: I0930 19:57:24.116048 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:57:24 crc kubenswrapper[4756]: E0930 19:57:24.116657 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:57:36 crc kubenswrapper[4756]: I0930 19:57:36.115587 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:57:36 crc kubenswrapper[4756]: E0930 19:57:36.116327 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:57:49 crc kubenswrapper[4756]: I0930 19:57:49.116306 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:57:49 crc kubenswrapper[4756]: E0930 19:57:49.117032 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:58:00 crc kubenswrapper[4756]: I0930 19:58:00.115547 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:58:00 crc kubenswrapper[4756]: E0930 19:58:00.116168 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:58:15 crc kubenswrapper[4756]: I0930 19:58:15.116504 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:58:15 crc kubenswrapper[4756]: E0930 19:58:15.117251 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:58:17 crc kubenswrapper[4756]: I0930 19:58:17.856075 4756 scope.go:117] "RemoveContainer" containerID="a98b89ec3d4c88ae15503ab82bce6e0663ac2886a6aceee2848aa1443a4ae194"
Sep 30 19:58:17 crc kubenswrapper[4756]: I0930 19:58:17.888039 4756 scope.go:117] "RemoveContainer" containerID="f7d5ee1721d495453ac73162ff8b96b6a3b3f3d4c350acafb440165e3ddc9dcb"
Sep 30 19:58:17 crc kubenswrapper[4756]: I0930 19:58:17.916934 4756 scope.go:117] "RemoveContainer" containerID="cc833a452e5eb7d11d7eb0502e0bd5e4fb9085857c830eaf9182bddd7e675953"
Sep 30 19:58:17 crc kubenswrapper[4756]: I0930 19:58:17.941535 4756 scope.go:117] "RemoveContainer" containerID="6a012f6649739f7a3c55f7eb5ccd4842f6f7de987bfd7b459d8bead1ed555778"
Sep 30 19:58:17 crc kubenswrapper[4756]: I0930 19:58:17.975302 4756 scope.go:117] "RemoveContainer" containerID="6642d2ccfdc3501ceb1f771a6ea2bd1b3170a7e579b9cc7e8dc8a0a13881303a"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.005253 4756 scope.go:117] "RemoveContainer" containerID="e3de5d8e8ddf6f0cd0c0368c9ca27659f3f2da42af7357ba2cc6cf72a80b007a"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.066733 4756 scope.go:117] "RemoveContainer" containerID="18b54d61471a434dd27220be8222ac734c16562af18d9cb5d87c937b4bbb70ab"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.093556 4756 scope.go:117] "RemoveContainer" containerID="c873acaf2a9f5921ab2fc0924a478163d49e82779ad8a5ee008a11ab0463e3a2"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.155208 4756 scope.go:117] "RemoveContainer" containerID="dede2f383ffabf28c795cf50f3401369dd333a7c95f685770c5342063991d80f"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.196947 4756 scope.go:117] "RemoveContainer" containerID="ba6569afe758498355cd9682bc8fca893079965c6280528e59fd48ecadc1495f"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.215916 4756 scope.go:117] "RemoveContainer" containerID="3816f49e158334388e40eecbe485b69612c7263f206e5f76f6bf16f35cb72753"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.237822 4756 scope.go:117] "RemoveContainer" containerID="dd34ebee6ba535fafab2697edd599929e443e0b1d71841eeddabdc0a54f03aaa"
Sep 30 19:58:18 crc kubenswrapper[4756]: I0930 19:58:18.258082 4756 scope.go:117] "RemoveContainer" containerID="ef091cd54f04664527cd20ffacef796abb55906c385adb302c9728f615fa218a"
Sep 30 19:58:27 crc kubenswrapper[4756]: I0930 19:58:27.117098 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:58:27 crc kubenswrapper[4756]: E0930 19:58:27.118322 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:58:38 crc kubenswrapper[4756]: I0930 19:58:38.116644 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:58:38 crc kubenswrapper[4756]: E0930 19:58:38.118223 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:58:53 crc kubenswrapper[4756]: I0930 19:58:53.115998 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:58:53 crc kubenswrapper[4756]: E0930 19:58:53.116670 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:59:08 crc kubenswrapper[4756]: I0930 19:59:08.116535 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:59:08 crc kubenswrapper[4756]: E0930 19:59:08.117695 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:59:18 crc kubenswrapper[4756]: I0930 19:59:18.412682 4756 scope.go:117] "RemoveContainer" containerID="10c6b1bb1f7f4de1bc5c4aca7ca3dd73b0eea0fe2e2a9e20084f2def6aa17018"
Sep 30 19:59:18 crc kubenswrapper[4756]: I0930 19:59:18.451040 4756 scope.go:117] "RemoveContainer" containerID="f362a6f3b07e09c3283f9cf2b98cdc3b3ef2d6ed31377ef667da3782fae5da8a"
Sep 30 19:59:18 crc kubenswrapper[4756]: I0930 19:59:18.502590 4756 scope.go:117] "RemoveContainer" containerID="2c21c56307aa57c6e443391243b07699b843b3fabf15be8a802984834452c537"
Sep 30 19:59:18 crc kubenswrapper[4756]: I0930 19:59:18.536166 4756 scope.go:117] "RemoveContainer" containerID="e10cbb9a45161cb9ac5e6eb2582b06fee216894c3414ab0191425a8e6235e510"
Sep 30 19:59:22 crc kubenswrapper[4756]: I0930 19:59:22.116065 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:59:22 crc kubenswrapper[4756]: E0930 19:59:22.116650 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:59:37 crc kubenswrapper[4756]: I0930 19:59:37.116303 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:59:37 crc kubenswrapper[4756]: E0930 19:59:37.117281 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 19:59:48 crc kubenswrapper[4756]: I0930 19:59:48.116713 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b"
Sep 30 19:59:48 crc kubenswrapper[4756]: E0930 19:59:48.117777 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.161475 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"]
Sep 30 20:00:00 crc kubenswrapper[4756]: E0930 20:00:00.162187 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="registry-server"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.162199 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="registry-server"
Sep 30 20:00:00 crc kubenswrapper[4756]: E0930 20:00:00.162224 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="extract-utilities"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.162230 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="extract-utilities"
Sep 30 20:00:00 crc kubenswrapper[4756]: E0930 20:00:00.162245 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="extract-content"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.162251 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="extract-content"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.162373 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2c4f046-797f-42cf-aaa7-b253c9c811e4" containerName="registry-server"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.162840 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.166081 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.171203 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.174053 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"]
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.217134 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.217305 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdcnq\" (UniqueName: \"kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.217444 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.319310 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"
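Editor's note on the records above: the long run of paired "RemoveContainer" / "Error syncing pod" entries between 19:57:24 and 19:59:48 is kubelet's CrashLoopBackOff loop for machine-config-daemon-4n9zj. Each periodic sync finds the container still in back-off and is skipped with the same "back-off 5m0s" message. Kubelet's documented restart back-off starts at 10s and doubles per restart up to a 5m cap; a self-contained sketch of that schedule (the reset-after-a-stable-run behaviour is deliberately omitted):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns a kubelet-style delay before restart n under
// CrashLoopBackOff: 10s, 20s, 40s, ... doubling up to the 5m cap that
// shows up in the log as "back-off 5m0s".
func crashLoopDelay(restart int) time.Duration {
	const (
		initialDelay = 10 * time.Second
		maxDelay     = 5 * time.Minute
	)
	d := initialDelay
	for i := 1; i < restart; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 7; n++ {
		fmt.Printf("restart %d: wait %s\n", n, crashLoopDelay(n))
	}
}

By this point in the log the daemon has restarted often enough to sit at the cap, which is why every retry in this stretch is refused with the identical back-off message.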
Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.319369 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdcnq\" (UniqueName: \"kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.319425 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.320217 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.327417 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.337417 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdcnq\" (UniqueName: \"kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq\") pod \"collect-profiles-29321040-ngp2x\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.487248 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:00 crc kubenswrapper[4756]: I0930 20:00:00.899836 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"] Sep 30 20:00:01 crc kubenswrapper[4756]: I0930 20:00:01.036726 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" event={"ID":"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a","Type":"ContainerStarted","Data":"14cff4d18e96df57a6a2fc0aff1ce5cd0677a2f7707b6ca3fd2f1e5ca59e7da3"} Sep 30 20:00:01 crc kubenswrapper[4756]: I0930 20:00:01.051357 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" podStartSLOduration=1.051338453 podStartE2EDuration="1.051338453s" podCreationTimestamp="2025-09-30 20:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:00:01.05120647 +0000 UTC m=+1730.672139957" watchObservedRunningTime="2025-09-30 20:00:01.051338453 +0000 UTC m=+1730.672271940" Sep 30 20:00:02 crc kubenswrapper[4756]: I0930 20:00:02.046037 4756 generic.go:334] "Generic (PLEG): container finished" podID="bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" containerID="815009d102e9e8828f9732d699ff140b56c62727f10ef37a42e9d85ab48860b4" exitCode=0 Sep 30 20:00:02 crc kubenswrapper[4756]: I0930 20:00:02.046110 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" event={"ID":"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a","Type":"ContainerDied","Data":"815009d102e9e8828f9732d699ff140b56c62727f10ef37a42e9d85ab48860b4"} Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.116159 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:00:03 crc kubenswrapper[4756]: E0930 20:00:03.116372 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.303029 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.362117 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdcnq\" (UniqueName: \"kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq\") pod \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.362254 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume\") pod \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.362291 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume\") pod \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\" (UID: \"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a\") " Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.363032 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume" (OuterVolumeSpecName: "config-volume") pod "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" (UID: "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.367281 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" (UID: "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.370530 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq" (OuterVolumeSpecName: "kube-api-access-cdcnq") pod "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" (UID: "bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a"). InnerVolumeSpecName "kube-api-access-cdcnq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.464245 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdcnq\" (UniqueName: \"kubernetes.io/projected/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-kube-api-access-cdcnq\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.464284 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:03 crc kubenswrapper[4756]: I0930 20:00:03.464294 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:04 crc kubenswrapper[4756]: I0930 20:00:04.063558 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" event={"ID":"bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a","Type":"ContainerDied","Data":"14cff4d18e96df57a6a2fc0aff1ce5cd0677a2f7707b6ca3fd2f1e5ca59e7da3"} Sep 30 20:00:04 crc kubenswrapper[4756]: I0930 20:00:04.063600 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14cff4d18e96df57a6a2fc0aff1ce5cd0677a2f7707b6ca3fd2f1e5ca59e7da3" Sep 30 20:00:04 crc kubenswrapper[4756]: I0930 20:00:04.063647 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x" Sep 30 20:00:15 crc kubenswrapper[4756]: I0930 20:00:15.118437 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:00:15 crc kubenswrapper[4756]: E0930 20:00:15.119982 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:00:18 crc kubenswrapper[4756]: I0930 20:00:18.623254 4756 scope.go:117] "RemoveContainer" containerID="214d8b533d03867b293add42a85a43e01c2f7db0aa4bc6c0526e7fb93719b660" Sep 30 20:00:18 crc kubenswrapper[4756]: I0930 20:00:18.646638 4756 scope.go:117] "RemoveContainer" containerID="518a7a9f5453f1009879b94cd4d7635553563523922db8f285224d9c9a603800" Sep 30 20:00:29 crc kubenswrapper[4756]: I0930 20:00:29.115812 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:00:29 crc kubenswrapper[4756]: E0930 20:00:29.116280 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:00:43 crc kubenswrapper[4756]: I0930 20:00:43.115812 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:00:43 crc 
kubenswrapper[4756]: E0930 20:00:43.116688 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:00:55 crc kubenswrapper[4756]: I0930 20:00:55.115699 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:00:55 crc kubenswrapper[4756]: E0930 20:00:55.116800 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:01:09 crc kubenswrapper[4756]: I0930 20:01:09.116363 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:01:09 crc kubenswrapper[4756]: E0930 20:01:09.117299 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:01:21 crc kubenswrapper[4756]: I0930 20:01:21.119114 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:01:21 crc kubenswrapper[4756]: E0930 20:01:21.119775 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:01:33 crc kubenswrapper[4756]: I0930 20:01:33.117290 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:01:33 crc kubenswrapper[4756]: E0930 20:01:33.118171 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:01:45 crc kubenswrapper[4756]: I0930 20:01:45.115885 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:01:45 crc kubenswrapper[4756]: E0930 20:01:45.116657 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:02:00 crc kubenswrapper[4756]: I0930 20:02:00.116107 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:02:00 crc kubenswrapper[4756]: E0930 20:02:00.116787 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:02:14 crc kubenswrapper[4756]: I0930 20:02:14.115748 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:02:15 crc kubenswrapper[4756]: I0930 20:02:15.148122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19"} Sep 30 20:04:41 crc kubenswrapper[4756]: I0930 20:04:41.539635 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:04:41 crc kubenswrapper[4756]: I0930 20:04:41.540607 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:05:11 crc kubenswrapper[4756]: I0930 20:05:11.539138 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:05:11 crc kubenswrapper[4756]: I0930 20:05:11.539735 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.556166 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:12 crc kubenswrapper[4756]: E0930 20:05:12.556463 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" containerName="collect-profiles" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.556475 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" containerName="collect-profiles" Sep 30 20:05:12 
crc kubenswrapper[4756]: I0930 20:05:12.556610 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" containerName="collect-profiles" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.557545 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.570291 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.735481 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pdmk\" (UniqueName: \"kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.735822 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.735894 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.836918 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.837018 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.837069 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pdmk\" (UniqueName: \"kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.837759 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.837785 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.862781 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pdmk\" (UniqueName: \"kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk\") pod \"redhat-operators-2qwnv\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:12 crc kubenswrapper[4756]: I0930 20:05:12.880577 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:13 crc kubenswrapper[4756]: I0930 20:05:13.313908 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:13 crc kubenswrapper[4756]: I0930 20:05:13.553012 4756 generic.go:334] "Generic (PLEG): container finished" podID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerID="4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a" exitCode=0 Sep 30 20:05:13 crc kubenswrapper[4756]: I0930 20:05:13.553057 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerDied","Data":"4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a"} Sep 30 20:05:13 crc kubenswrapper[4756]: I0930 20:05:13.553085 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerStarted","Data":"afc1f2cb12660e3776a041a9ca76e1f81e3c5a04f373392fbe362b82333eb51e"} Sep 30 20:05:13 crc kubenswrapper[4756]: I0930 20:05:13.554564 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:05:15 crc kubenswrapper[4756]: I0930 20:05:15.572584 4756 generic.go:334] "Generic (PLEG): container finished" podID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerID="f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a" exitCode=0 Sep 30 20:05:15 crc kubenswrapper[4756]: I0930 20:05:15.572652 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerDied","Data":"f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a"} Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.142524 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.144486 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.161565 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.293472 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.293835 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.293869 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jdbd\" (UniqueName: \"kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.395380 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.395445 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.395466 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jdbd\" (UniqueName: \"kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.395936 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.396030 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.419116 4756 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9jdbd\" (UniqueName: \"kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd\") pod \"redhat-marketplace-rgslj\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.489190 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.597660 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerStarted","Data":"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88"} Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.621543 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2qwnv" podStartSLOduration=1.91373454 podStartE2EDuration="4.621526044s" podCreationTimestamp="2025-09-30 20:05:12 +0000 UTC" firstStartedPulling="2025-09-30 20:05:13.554320763 +0000 UTC m=+2043.175254250" lastFinishedPulling="2025-09-30 20:05:16.262112267 +0000 UTC m=+2045.883045754" observedRunningTime="2025-09-30 20:05:16.613797002 +0000 UTC m=+2046.234730489" watchObservedRunningTime="2025-09-30 20:05:16.621526044 +0000 UTC m=+2046.242459521" Sep 30 20:05:16 crc kubenswrapper[4756]: I0930 20:05:16.895837 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:16 crc kubenswrapper[4756]: W0930 20:05:16.898406 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod628f0e88_d7ae_4c7d_976f_da3d5592e21f.slice/crio-dddd47a983c0f03f878c0bb1088352393836019e126f5ab40a43fd6e1dc40990 WatchSource:0}: Error finding container dddd47a983c0f03f878c0bb1088352393836019e126f5ab40a43fd6e1dc40990: Status 404 returned error can't find the container with id dddd47a983c0f03f878c0bb1088352393836019e126f5ab40a43fd6e1dc40990 Sep 30 20:05:17 crc kubenswrapper[4756]: I0930 20:05:17.605334 4756 generic.go:334] "Generic (PLEG): container finished" podID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerID="d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f" exitCode=0 Sep 30 20:05:17 crc kubenswrapper[4756]: I0930 20:05:17.605373 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerDied","Data":"d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f"} Sep 30 20:05:17 crc kubenswrapper[4756]: I0930 20:05:17.607425 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerStarted","Data":"dddd47a983c0f03f878c0bb1088352393836019e126f5ab40a43fd6e1dc40990"} Sep 30 20:05:19 crc kubenswrapper[4756]: I0930 20:05:19.622385 4756 generic.go:334] "Generic (PLEG): container finished" podID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerID="c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec" exitCode=0 Sep 30 20:05:19 crc kubenswrapper[4756]: I0930 20:05:19.622663 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" 
event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerDied","Data":"c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec"} Sep 30 20:05:20 crc kubenswrapper[4756]: I0930 20:05:20.632238 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerStarted","Data":"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944"} Sep 30 20:05:20 crc kubenswrapper[4756]: I0930 20:05:20.650118 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rgslj" podStartSLOduration=2.172871548 podStartE2EDuration="4.650098035s" podCreationTimestamp="2025-09-30 20:05:16 +0000 UTC" firstStartedPulling="2025-09-30 20:05:17.607235241 +0000 UTC m=+2047.228168718" lastFinishedPulling="2025-09-30 20:05:20.084461728 +0000 UTC m=+2049.705395205" observedRunningTime="2025-09-30 20:05:20.647288341 +0000 UTC m=+2050.268221848" watchObservedRunningTime="2025-09-30 20:05:20.650098035 +0000 UTC m=+2050.271031512" Sep 30 20:05:22 crc kubenswrapper[4756]: I0930 20:05:22.881317 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:22 crc kubenswrapper[4756]: I0930 20:05:22.881438 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:22 crc kubenswrapper[4756]: I0930 20:05:22.923601 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:23 crc kubenswrapper[4756]: I0930 20:05:23.688002 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:24 crc kubenswrapper[4756]: I0930 20:05:24.125475 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:25 crc kubenswrapper[4756]: I0930 20:05:25.663333 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2qwnv" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="registry-server" containerID="cri-o://51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88" gracePeriod=2 Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.489622 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.491697 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.535798 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.536382 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.672018 4756 generic.go:334] "Generic (PLEG): container finished" podID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerID="51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88" exitCode=0 Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.672745 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwnv" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.673068 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerDied","Data":"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88"} Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.673090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwnv" event={"ID":"39cc64ce-a5a3-47ac-a424-40211e90ec6d","Type":"ContainerDied","Data":"afc1f2cb12660e3776a041a9ca76e1f81e3c5a04f373392fbe362b82333eb51e"} Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.673106 4756 scope.go:117] "RemoveContainer" containerID="51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.687497 4756 scope.go:117] "RemoveContainer" containerID="f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.703124 4756 scope.go:117] "RemoveContainer" containerID="4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.726751 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.732794 4756 scope.go:117] "RemoveContainer" containerID="51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88" Sep 30 20:05:26 crc kubenswrapper[4756]: E0930 20:05:26.733205 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88\": container with ID starting with 51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88 not found: ID does not exist" containerID="51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.733242 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88"} err="failed to get container status \"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88\": rpc error: code = NotFound desc = could not find container \"51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88\": container with ID starting with 51bf73668ae468a7dda5972877441eb4457db3517b39d290125379c4cb8ded88 not found: ID does not exist" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.733265 4756 scope.go:117] "RemoveContainer" containerID="f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a" Sep 30 20:05:26 crc kubenswrapper[4756]: E0930 20:05:26.733543 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a\": container with ID starting with f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a not found: ID does not exist" containerID="f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.733579 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a"} 
err="failed to get container status \"f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a\": rpc error: code = NotFound desc = could not find container \"f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a\": container with ID starting with f1a3d738311eef8e4891280b3cb07b25f8b46b173ecba0c5980dbae31329576a not found: ID does not exist" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.733594 4756 scope.go:117] "RemoveContainer" containerID="4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a" Sep 30 20:05:26 crc kubenswrapper[4756]: E0930 20:05:26.733903 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a\": container with ID starting with 4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a not found: ID does not exist" containerID="4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.733931 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a"} err="failed to get container status \"4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a\": rpc error: code = NotFound desc = could not find container \"4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a\": container with ID starting with 4136fbf0c5ee61056525aa0f7afc52b81107b43021309d3b518c8a16e9dd5a8a not found: ID does not exist" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.734887 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities\") pod \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.734984 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content\") pod \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.735094 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pdmk\" (UniqueName: \"kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk\") pod \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\" (UID: \"39cc64ce-a5a3-47ac-a424-40211e90ec6d\") " Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.735702 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities" (OuterVolumeSpecName: "utilities") pod "39cc64ce-a5a3-47ac-a424-40211e90ec6d" (UID: "39cc64ce-a5a3-47ac-a424-40211e90ec6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.740538 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk" (OuterVolumeSpecName: "kube-api-access-5pdmk") pod "39cc64ce-a5a3-47ac-a424-40211e90ec6d" (UID: "39cc64ce-a5a3-47ac-a424-40211e90ec6d"). InnerVolumeSpecName "kube-api-access-5pdmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.818980 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39cc64ce-a5a3-47ac-a424-40211e90ec6d" (UID: "39cc64ce-a5a3-47ac-a424-40211e90ec6d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.837138 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pdmk\" (UniqueName: \"kubernetes.io/projected/39cc64ce-a5a3-47ac-a424-40211e90ec6d-kube-api-access-5pdmk\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.837364 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:26 crc kubenswrapper[4756]: I0930 20:05:26.837449 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cc64ce-a5a3-47ac-a424-40211e90ec6d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:27 crc kubenswrapper[4756]: I0930 20:05:27.003110 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:27 crc kubenswrapper[4756]: I0930 20:05:27.008565 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2qwnv"] Sep 30 20:05:27 crc kubenswrapper[4756]: I0930 20:05:27.125005 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" path="/var/lib/kubelet/pods/39cc64ce-a5a3-47ac-a424-40211e90ec6d/volumes" Sep 30 20:05:29 crc kubenswrapper[4756]: I0930 20:05:29.128099 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:29 crc kubenswrapper[4756]: I0930 20:05:29.690778 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rgslj" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="registry-server" containerID="cri-o://97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944" gracePeriod=2 Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.106557 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.187597 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content\") pod \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.187764 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities\") pod \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.187789 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jdbd\" (UniqueName: \"kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd\") pod \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\" (UID: \"628f0e88-d7ae-4c7d-976f-da3d5592e21f\") " Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.189270 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities" (OuterVolumeSpecName: "utilities") pod "628f0e88-d7ae-4c7d-976f-da3d5592e21f" (UID: "628f0e88-d7ae-4c7d-976f-da3d5592e21f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.197819 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd" (OuterVolumeSpecName: "kube-api-access-9jdbd") pod "628f0e88-d7ae-4c7d-976f-da3d5592e21f" (UID: "628f0e88-d7ae-4c7d-976f-da3d5592e21f"). InnerVolumeSpecName "kube-api-access-9jdbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.206665 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "628f0e88-d7ae-4c7d-976f-da3d5592e21f" (UID: "628f0e88-d7ae-4c7d-976f-da3d5592e21f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.290077 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.290106 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628f0e88-d7ae-4c7d-976f-da3d5592e21f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.290116 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jdbd\" (UniqueName: \"kubernetes.io/projected/628f0e88-d7ae-4c7d-976f-da3d5592e21f-kube-api-access-9jdbd\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.701235 4756 generic.go:334] "Generic (PLEG): container finished" podID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerID="97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944" exitCode=0 Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.701346 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgslj" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.701360 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerDied","Data":"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944"} Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.701687 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgslj" event={"ID":"628f0e88-d7ae-4c7d-976f-da3d5592e21f","Type":"ContainerDied","Data":"dddd47a983c0f03f878c0bb1088352393836019e126f5ab40a43fd6e1dc40990"} Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.701720 4756 scope.go:117] "RemoveContainer" containerID="97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.727388 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.734334 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgslj"] Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.742000 4756 scope.go:117] "RemoveContainer" containerID="c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.763650 4756 scope.go:117] "RemoveContainer" containerID="d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.790990 4756 scope.go:117] "RemoveContainer" containerID="97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944" Sep 30 20:05:30 crc kubenswrapper[4756]: E0930 20:05:30.793034 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944\": container with ID starting with 97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944 not found: ID does not exist" containerID="97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.793064 4756 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944"} err="failed to get container status \"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944\": rpc error: code = NotFound desc = could not find container \"97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944\": container with ID starting with 97aaf842717e5649dd8b77bcc095041eb3616e3247b7c812432fcf6075238944 not found: ID does not exist" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.793084 4756 scope.go:117] "RemoveContainer" containerID="c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec" Sep 30 20:05:30 crc kubenswrapper[4756]: E0930 20:05:30.793694 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec\": container with ID starting with c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec not found: ID does not exist" containerID="c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.793753 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec"} err="failed to get container status \"c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec\": rpc error: code = NotFound desc = could not find container \"c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec\": container with ID starting with c93a795e9901855c98656a683535323558befcbd9d12b1480f475c9f772b5eec not found: ID does not exist" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.793794 4756 scope.go:117] "RemoveContainer" containerID="d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f" Sep 30 20:05:30 crc kubenswrapper[4756]: E0930 20:05:30.794215 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f\": container with ID starting with d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f not found: ID does not exist" containerID="d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f" Sep 30 20:05:30 crc kubenswrapper[4756]: I0930 20:05:30.794237 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f"} err="failed to get container status \"d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f\": rpc error: code = NotFound desc = could not find container \"d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f\": container with ID starting with d842f0ca1d88ea1e52c1f1430bd33b54ec9e0316bbecf9d12716157666db522f not found: ID does not exist" Sep 30 20:05:31 crc kubenswrapper[4756]: I0930 20:05:31.124396 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" path="/var/lib/kubelet/pods/628f0e88-d7ae-4c7d-976f-da3d5592e21f/volumes" Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.538919 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.539762 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.539841 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.540900 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.540998 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19" gracePeriod=600 Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.795239 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19" exitCode=0 Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.795318 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19"} Sep 30 20:05:41 crc kubenswrapper[4756]: I0930 20:05:41.795854 4756 scope.go:117] "RemoveContainer" containerID="f56aaee77dcbc16390076ba6d3302a79b4672011f53fc17fe5d502f42d15f64b" Sep 30 20:05:42 crc kubenswrapper[4756]: I0930 20:05:42.806370 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"} Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.416260 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cp56n"] Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417123 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="extract-utilities" Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417137 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="extract-utilities" Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417157 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="extract-utilities" Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417166 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" 
Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417202 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417210 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417221 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417230 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417249 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="extract-content"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417258 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="extract-content"
Sep 30 20:06:35 crc kubenswrapper[4756]: E0930 20:06:35.417272 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="extract-content"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417281 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="extract-content"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417471 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="39cc64ce-a5a3-47ac-a424-40211e90ec6d" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.417491 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="628f0e88-d7ae-4c7d-976f-da3d5592e21f" containerName="registry-server"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.418759 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.440522 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.440607 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.440688 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.441956 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cp56n"]
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.542336 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.542459 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.542523 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.543244 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.543349 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.569024 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n"
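[Editorial aside: the VerifyControllerAttachedVolume / MountVolume.SetUp pairs above come from the kubelet's volume manager, which reconciles a desired state of world (what the pod spec needs) against an actual state of world (what is currently mounted) and acts on the delta. A toy Go sketch of that reconcile pattern, with plain maps standing in for the real caches in pkg/kubelet/volumemanager — illustrative only:]

package main

import "fmt"

// reconcile mounts anything desired-but-absent and unmounts anything
// present-but-no-longer-desired, mirroring the log records above.
func reconcile(desired, actual map[string]bool) {
	for vol := range desired {
		if !actual[vol] {
			fmt.Println("MountVolume started for volume", vol)
			actual[vol] = true // pretend SetUp succeeded
		}
	}
	for vol := range actual {
		if !desired[vol] {
			fmt.Println("UnmountVolume started for volume", vol)
			delete(actual, vol)
		}
	}
}

func main() {
	desired := map[string]bool{"catalog-content": true, "utilities": true, "kube-api-access-g5mqd": true}
	actual := map[string]bool{}
	reconcile(desired, actual) // mounts all three, as when the pod is created
	for vol := range desired {
		delete(desired, vol)
	}
	reconcile(desired, actual) // pod deletion later drives the unmount path
}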
"MountVolume.SetUp succeeded for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") pod \"certified-operators-cp56n\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") " pod="openshift-marketplace/certified-operators-cp56n" Sep 30 20:06:35 crc kubenswrapper[4756]: I0930 20:06:35.744851 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cp56n" Sep 30 20:06:36 crc kubenswrapper[4756]: I0930 20:06:36.202114 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cp56n"] Sep 30 20:06:36 crc kubenswrapper[4756]: I0930 20:06:36.246164 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerStarted","Data":"898cd503e162c4a2302b79e50a410f551241658bda73a08220794f2f84d005a7"} Sep 30 20:06:37 crc kubenswrapper[4756]: I0930 20:06:37.253584 4756 generic.go:334] "Generic (PLEG): container finished" podID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerID="3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10" exitCode=0 Sep 30 20:06:37 crc kubenswrapper[4756]: I0930 20:06:37.253674 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerDied","Data":"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10"} Sep 30 20:06:39 crc kubenswrapper[4756]: I0930 20:06:39.272816 4756 generic.go:334] "Generic (PLEG): container finished" podID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerID="c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd" exitCode=0 Sep 30 20:06:39 crc kubenswrapper[4756]: I0930 20:06:39.272957 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerDied","Data":"c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd"} Sep 30 20:06:40 crc kubenswrapper[4756]: I0930 20:06:40.282607 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerStarted","Data":"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869"} Sep 30 20:06:40 crc kubenswrapper[4756]: I0930 20:06:40.309829 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cp56n" podStartSLOduration=2.734626463 podStartE2EDuration="5.3098013s" podCreationTimestamp="2025-09-30 20:06:35 +0000 UTC" firstStartedPulling="2025-09-30 20:06:37.256495624 +0000 UTC m=+2126.877429111" lastFinishedPulling="2025-09-30 20:06:39.831670471 +0000 UTC m=+2129.452603948" observedRunningTime="2025-09-30 20:06:40.299133071 +0000 UTC m=+2129.920066548" watchObservedRunningTime="2025-09-30 20:06:40.3098013 +0000 UTC m=+2129.930734777" Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.204563 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rl6m2"] Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.207158 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.217422 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rl6m2"]
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.338069 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.338113 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.338201 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.439677 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.439726 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.439822 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.440423 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.440682 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.461305 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2"
"MountVolume.SetUp succeeded for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") pod \"community-operators-rl6m2\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") " pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:41 crc kubenswrapper[4756]: I0930 20:06:41.543633 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:42 crc kubenswrapper[4756]: I0930 20:06:42.004090 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rl6m2"] Sep 30 20:06:42 crc kubenswrapper[4756]: W0930 20:06:42.015199 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ff4996c_ab53_489d_966e_ece94a23562b.slice/crio-12019861e591099ea5f569f0871fecb40ae47f050cd76b4bbd7f3641ef1d3958 WatchSource:0}: Error finding container 12019861e591099ea5f569f0871fecb40ae47f050cd76b4bbd7f3641ef1d3958: Status 404 returned error can't find the container with id 12019861e591099ea5f569f0871fecb40ae47f050cd76b4bbd7f3641ef1d3958 Sep 30 20:06:42 crc kubenswrapper[4756]: I0930 20:06:42.307210 4756 generic.go:334] "Generic (PLEG): container finished" podID="5ff4996c-ab53-489d-966e-ece94a23562b" containerID="4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e" exitCode=0 Sep 30 20:06:42 crc kubenswrapper[4756]: I0930 20:06:42.307262 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerDied","Data":"4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e"} Sep 30 20:06:42 crc kubenswrapper[4756]: I0930 20:06:42.307294 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerStarted","Data":"12019861e591099ea5f569f0871fecb40ae47f050cd76b4bbd7f3641ef1d3958"} Sep 30 20:06:43 crc kubenswrapper[4756]: I0930 20:06:43.317157 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerStarted","Data":"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6"} Sep 30 20:06:44 crc kubenswrapper[4756]: I0930 20:06:44.325595 4756 generic.go:334] "Generic (PLEG): container finished" podID="5ff4996c-ab53-489d-966e-ece94a23562b" containerID="56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6" exitCode=0 Sep 30 20:06:44 crc kubenswrapper[4756]: I0930 20:06:44.325642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerDied","Data":"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6"} Sep 30 20:06:45 crc kubenswrapper[4756]: I0930 20:06:45.334226 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerStarted","Data":"8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b"} Sep 30 20:06:45 crc kubenswrapper[4756]: I0930 20:06:45.390598 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rl6m2" 
Sep 30 20:06:45 crc kubenswrapper[4756]: I0930 20:06:45.745744 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:45 crc kubenswrapper[4756]: I0930 20:06:45.745816 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:45 crc kubenswrapper[4756]: I0930 20:06:45.790173 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:46 crc kubenswrapper[4756]: I0930 20:06:46.390440 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:47 crc kubenswrapper[4756]: I0930 20:06:47.992128 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cp56n"]
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.354770 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cp56n" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="registry-server" containerID="cri-o://996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869" gracePeriod=2
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.783353 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cp56n"
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.842550 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content\") pod \"1973b0b4-a604-410e-a042-f4a328f9fd71\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") "
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.842621 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") pod \"1973b0b4-a604-410e-a042-f4a328f9fd71\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") "
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.842645 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities\") pod \"1973b0b4-a604-410e-a042-f4a328f9fd71\" (UID: \"1973b0b4-a604-410e-a042-f4a328f9fd71\") "
Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.843589 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities" (OuterVolumeSpecName: "utilities") pod "1973b0b4-a604-410e-a042-f4a328f9fd71" (UID: "1973b0b4-a604-410e-a042-f4a328f9fd71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
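[Editorial aside: gracePeriod=2 in the kill record above means the runtime gives registry-server two seconds between the polite termination signal and a forced kill. A process-level Go illustration of that general SIGTERM-then-SIGKILL pattern (Unix-only; this is the shape of the mechanism, not CRI-O's implementation):]

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	// Step 1: polite request, as in "Killing container with a grace period".
	cmd.Process.Signal(syscall.SIGTERM)

	select {
	case <-done:
		fmt.Println("exited within the grace period") // the log's exitCode=0 case
	case <-time.After(2 * time.Second): // gracePeriod=2
		cmd.Process.Kill() // SIGKILL once the grace period expires
		<-done
		fmt.Println("killed after grace period expired")
	}
}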
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.843888 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.854192 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd" (OuterVolumeSpecName: "kube-api-access-g5mqd") pod "1973b0b4-a604-410e-a042-f4a328f9fd71" (UID: "1973b0b4-a604-410e-a042-f4a328f9fd71"). InnerVolumeSpecName "kube-api-access-g5mqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.904044 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1973b0b4-a604-410e-a042-f4a328f9fd71" (UID: "1973b0b4-a604-410e-a042-f4a328f9fd71"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.945985 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5mqd\" (UniqueName: \"kubernetes.io/projected/1973b0b4-a604-410e-a042-f4a328f9fd71-kube-api-access-g5mqd\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:48 crc kubenswrapper[4756]: I0930 20:06:48.946041 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1973b0b4-a604-410e-a042-f4a328f9fd71-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.364915 4756 generic.go:334] "Generic (PLEG): container finished" podID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerID="996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869" exitCode=0 Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.364969 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerDied","Data":"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869"} Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.365005 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cp56n" event={"ID":"1973b0b4-a604-410e-a042-f4a328f9fd71","Type":"ContainerDied","Data":"898cd503e162c4a2302b79e50a410f551241658bda73a08220794f2f84d005a7"} Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.365030 4756 scope.go:117] "RemoveContainer" containerID="996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869" Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.365202 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.388961 4756 scope.go:117] "RemoveContainer" containerID="c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.399159 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cp56n"]
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.412354 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cp56n"]
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.414881 4756 scope.go:117] "RemoveContainer" containerID="3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.441260 4756 scope.go:117] "RemoveContainer" containerID="996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869"
Sep 30 20:06:49 crc kubenswrapper[4756]: E0930 20:06:49.441781 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869\": container with ID starting with 996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869 not found: ID does not exist" containerID="996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.441830 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869"} err="failed to get container status \"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869\": rpc error: code = NotFound desc = could not find container \"996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869\": container with ID starting with 996600af710342464be65e53ba34ce49142ef5d89444eeb794a1046d2f3ca869 not found: ID does not exist"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.441858 4756 scope.go:117] "RemoveContainer" containerID="c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd"
Sep 30 20:06:49 crc kubenswrapper[4756]: E0930 20:06:49.442207 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd\": container with ID starting with c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd not found: ID does not exist" containerID="c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.442295 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd"} err="failed to get container status \"c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd\": rpc error: code = NotFound desc = could not find container \"c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd\": container with ID starting with c9bd4cf01604d0c2687df9b17d94edad0806a9be08cd9d27944b6f948be702fd not found: ID does not exist"
Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.442374 4756 scope.go:117] "RemoveContainer" containerID="3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10"
Sep 30 20:06:49 crc kubenswrapper[4756]: E0930 20:06:49.442812 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10\": container with ID starting with 3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10 not found: ID does not exist" containerID="3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10"
failed" err="rpc error: code = NotFound desc = could not find container \"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10\": container with ID starting with 3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10 not found: ID does not exist" containerID="3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10" Sep 30 20:06:49 crc kubenswrapper[4756]: I0930 20:06:49.442846 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10"} err="failed to get container status \"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10\": rpc error: code = NotFound desc = could not find container \"3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10\": container with ID starting with 3b8b4e2f86fb61a30137e5acc095f8b6de3626c14c455805f6584cd07d36fc10 not found: ID does not exist" Sep 30 20:06:51 crc kubenswrapper[4756]: I0930 20:06:51.124687 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" path="/var/lib/kubelet/pods/1973b0b4-a604-410e-a042-f4a328f9fd71/volumes" Sep 30 20:06:51 crc kubenswrapper[4756]: I0930 20:06:51.543945 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:51 crc kubenswrapper[4756]: I0930 20:06:51.544282 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:51 crc kubenswrapper[4756]: I0930 20:06:51.595813 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:52 crc kubenswrapper[4756]: I0930 20:06:52.440724 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:53 crc kubenswrapper[4756]: I0930 20:06:53.190679 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rl6m2"] Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.412456 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rl6m2" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="registry-server" containerID="cri-o://8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b" gracePeriod=2 Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.857032 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.933537 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") pod \"5ff4996c-ab53-489d-966e-ece94a23562b\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") "
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.933678 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content\") pod \"5ff4996c-ab53-489d-966e-ece94a23562b\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") "
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.933713 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities\") pod \"5ff4996c-ab53-489d-966e-ece94a23562b\" (UID: \"5ff4996c-ab53-489d-966e-ece94a23562b\") "
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.934676 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities" (OuterVolumeSpecName: "utilities") pod "5ff4996c-ab53-489d-966e-ece94a23562b" (UID: "5ff4996c-ab53-489d-966e-ece94a23562b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.940646 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn" (OuterVolumeSpecName: "kube-api-access-gwvpn") pod "5ff4996c-ab53-489d-966e-ece94a23562b" (UID: "5ff4996c-ab53-489d-966e-ece94a23562b"). InnerVolumeSpecName "kube-api-access-gwvpn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:06:54 crc kubenswrapper[4756]: I0930 20:06:54.999493 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ff4996c-ab53-489d-966e-ece94a23562b" (UID: "5ff4996c-ab53-489d-966e-ece94a23562b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.035878 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwvpn\" (UniqueName: \"kubernetes.io/projected/5ff4996c-ab53-489d-966e-ece94a23562b-kube-api-access-gwvpn\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.035945 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.035958 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4996c-ab53-489d-966e-ece94a23562b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.422697 4756 generic.go:334] "Generic (PLEG): container finished" podID="5ff4996c-ab53-489d-966e-ece94a23562b" containerID="8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b" exitCode=0 Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.422756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerDied","Data":"8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b"} Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.422786 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rl6m2" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.422809 4756 scope.go:117] "RemoveContainer" containerID="8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.422793 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rl6m2" event={"ID":"5ff4996c-ab53-489d-966e-ece94a23562b","Type":"ContainerDied","Data":"12019861e591099ea5f569f0871fecb40ae47f050cd76b4bbd7f3641ef1d3958"} Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.450335 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rl6m2"] Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.451975 4756 scope.go:117] "RemoveContainer" containerID="56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.457861 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rl6m2"] Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.473230 4756 scope.go:117] "RemoveContainer" containerID="4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.510344 4756 scope.go:117] "RemoveContainer" containerID="8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b" Sep 30 20:06:55 crc kubenswrapper[4756]: E0930 20:06:55.511038 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b\": container with ID starting with 8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b not found: ID does not exist" containerID="8b3b3e16db2993fbec33f5e2d4815f25b4c197f549241704ab0e01bb3933e03b" Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.511102 
Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.511136 4756 scope.go:117] "RemoveContainer" containerID="56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6"
Sep 30 20:06:55 crc kubenswrapper[4756]: E0930 20:06:55.511664 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6\": container with ID starting with 56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6 not found: ID does not exist" containerID="56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6"
Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.511719 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6"} err="failed to get container status \"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6\": rpc error: code = NotFound desc = could not find container \"56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6\": container with ID starting with 56e801cde02f036ac1de362d041e61c8f46fb52c9d50ada3ee29bc2a2eb574a6 not found: ID does not exist"
Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.511748 4756 scope.go:117] "RemoveContainer" containerID="4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e"
Sep 30 20:06:55 crc kubenswrapper[4756]: E0930 20:06:55.512062 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e\": container with ID starting with 4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e not found: ID does not exist" containerID="4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e"
Sep 30 20:06:55 crc kubenswrapper[4756]: I0930 20:06:55.512107 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e"} err="failed to get container status \"4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e\": rpc error: code = NotFound desc = could not find container \"4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e\": container with ID starting with 4c94e5c9396dd5a7b02eef368c79d5a1eb8ab4c568d5e2af6d810fa7ac85d42e not found: ID does not exist"
Sep 30 20:06:57 crc kubenswrapper[4756]: I0930 20:06:57.131388 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" path="/var/lib/kubelet/pods/5ff4996c-ab53-489d-966e-ece94a23562b/volumes"
Sep 30 20:07:41 crc kubenswrapper[4756]: I0930 20:07:41.539018 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:07:41 crc kubenswrapper[4756]: I0930 20:07:41.539815 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:08:11 crc kubenswrapper[4756]: I0930 20:08:11.538854 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:08:11 crc kubenswrapper[4756]: I0930 20:08:11.539299 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:08:41 crc kubenswrapper[4756]: I0930 20:08:41.538884 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:08:41 crc kubenswrapper[4756]: I0930 20:08:41.539576 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:08:41 crc kubenswrapper[4756]: I0930 20:08:41.539648 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj"
Sep 30 20:08:41 crc kubenswrapper[4756]: I0930 20:08:41.540650 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 20:08:41 crc kubenswrapper[4756]: I0930 20:08:41.540765 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" gracePeriod=600
Sep 30 20:08:41 crc kubenswrapper[4756]: E0930 20:08:41.663684 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:08:42 crc kubenswrapper[4756]: I0930 20:08:42.331788 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" exitCode=0
Sep 30 20:08:42 crc kubenswrapper[4756]: I0930 20:08:42.331857 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"}
Sep 30 20:08:42 crc kubenswrapper[4756]: I0930 20:08:42.331928 4756 scope.go:117] "RemoveContainer" containerID="17fda7c8bad2c60a4c9ad2cd8f5d0f9964b6a0f217e7bbfac13174d4910d8c19"
Sep 30 20:08:42 crc kubenswrapper[4756]: I0930 20:08:42.332495 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:08:42 crc kubenswrapper[4756]: E0930 20:08:42.332853 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:08:53 crc kubenswrapper[4756]: I0930 20:08:53.116162 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:08:53 crc kubenswrapper[4756]: E0930 20:08:53.116896 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:09:07 crc kubenswrapper[4756]: I0930 20:09:07.116918 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:09:07 crc kubenswrapper[4756]: E0930 20:09:07.117893 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:09:20 crc kubenswrapper[4756]: I0930 20:09:20.116212 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:09:20 crc kubenswrapper[4756]: E0930 20:09:20.116924 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:09:35 crc kubenswrapper[4756]: I0930 20:09:35.116482 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:09:35 crc kubenswrapper[4756]: E0930 20:09:35.117155 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:09:46 crc kubenswrapper[4756]: I0930 20:09:46.117133 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:09:46 crc kubenswrapper[4756]: E0930 20:09:46.118020 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:09:58 crc kubenswrapper[4756]: I0930 20:09:58.116176 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:09:58 crc kubenswrapper[4756]: E0930 20:09:58.116807 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:10:10 crc kubenswrapper[4756]: I0930 20:10:10.115769 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:10:10 crc kubenswrapper[4756]: E0930 20:10:10.116584 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:10:25 crc kubenswrapper[4756]: I0930 20:10:25.116231 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:10:25 crc kubenswrapper[4756]: E0930 20:10:25.118108 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:10:36 crc kubenswrapper[4756]: I0930 20:10:36.115677 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:10:36 crc kubenswrapper[4756]: E0930 20:10:36.116341 4756 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:10:49 crc kubenswrapper[4756]: I0930 20:10:49.116238 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:10:49 crc kubenswrapper[4756]: E0930 20:10:49.117084 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:11:02 crc kubenswrapper[4756]: I0930 20:11:02.116653 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:11:02 crc kubenswrapper[4756]: E0930 20:11:02.117983 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:11:17 crc kubenswrapper[4756]: I0930 20:11:17.117177 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:11:17 crc kubenswrapper[4756]: E0930 20:11:17.118156 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:11:31 crc kubenswrapper[4756]: I0930 20:11:31.123905 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:11:31 crc kubenswrapper[4756]: E0930 20:11:31.124550 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:11:46 crc kubenswrapper[4756]: I0930 20:11:46.116644 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:11:46 crc kubenswrapper[4756]: E0930 20:11:46.117858 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Sep 30 20:12:01 crc kubenswrapper[4756]: I0930 20:12:01.124635 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:12:01 crc kubenswrapper[4756]: E0930 20:12:01.125542 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:12:13 crc kubenswrapper[4756]: I0930 20:12:13.116284 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:12:13 crc kubenswrapper[4756]: E0930 20:12:13.117176 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:12:27 crc kubenswrapper[4756]: I0930 20:12:27.116892 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:12:27 crc kubenswrapper[4756]: E0930 20:12:27.117734 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:12:40 crc kubenswrapper[4756]: I0930 20:12:40.116039 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:12:40 crc kubenswrapper[4756]: E0930 20:12:40.116747 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:12:55 crc kubenswrapper[4756]: I0930 20:12:55.115758 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2"
Sep 30 20:12:55 crc kubenswrapper[4756]: E0930 20:12:55.116451 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:13:09 crc kubenswrapper[4756]: I0930 20:13:09.116206 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:13:09 crc kubenswrapper[4756]: E0930 20:13:09.117030 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:13:23 crc kubenswrapper[4756]: I0930 20:13:23.117132 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:13:23 crc kubenswrapper[4756]: E0930 20:13:23.118088 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:13:38 crc kubenswrapper[4756]: I0930 20:13:38.115848 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:13:38 crc kubenswrapper[4756]: E0930 20:13:38.117585 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:13:52 crc kubenswrapper[4756]: I0930 20:13:52.116024 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:13:52 crc kubenswrapper[4756]: I0930 20:13:52.779441 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b"} Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.144946 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4"] Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146058 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146082 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146108 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146119 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" 
containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146157 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146169 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146196 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146208 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146227 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146237 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4756]: E0930 20:15:00.146265 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146277 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146530 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1973b0b4-a604-410e-a042-f4a328f9fd71" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.146565 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ff4996c-ab53-489d-966e-ece94a23562b" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.147345 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.184510 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.184837 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.190149 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4"] Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.286865 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gb9t\" (UniqueName: \"kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.286964 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.287008 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.388677 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.388750 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.388793 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gb9t\" (UniqueName: \"kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.389661 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume\") pod 
\"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.394837 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.410472 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gb9t\" (UniqueName: \"kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t\") pod \"collect-profiles-29321055-bzml4\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.503104 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:00 crc kubenswrapper[4756]: I0930 20:15:00.902826 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4"] Sep 30 20:15:01 crc kubenswrapper[4756]: I0930 20:15:01.372739 4756 generic.go:334] "Generic (PLEG): container finished" podID="77313252-ffb7-4283-8c2b-b0b9d51ce480" containerID="7f6a94a30a43da655de65a4c148f696a31bffc49e5af2e8aaf47c176a878d8f6" exitCode=0 Sep 30 20:15:01 crc kubenswrapper[4756]: I0930 20:15:01.372965 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" event={"ID":"77313252-ffb7-4283-8c2b-b0b9d51ce480","Type":"ContainerDied","Data":"7f6a94a30a43da655de65a4c148f696a31bffc49e5af2e8aaf47c176a878d8f6"} Sep 30 20:15:01 crc kubenswrapper[4756]: I0930 20:15:01.373115 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" event={"ID":"77313252-ffb7-4283-8c2b-b0b9d51ce480","Type":"ContainerStarted","Data":"72989b375dea083de251ef7eb17c2934df693d47a6ef8db057a68f4c0109657c"} Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.673268 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.827227 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume\") pod \"77313252-ffb7-4283-8c2b-b0b9d51ce480\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.827312 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gb9t\" (UniqueName: \"kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t\") pod \"77313252-ffb7-4283-8c2b-b0b9d51ce480\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.827440 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume\") pod \"77313252-ffb7-4283-8c2b-b0b9d51ce480\" (UID: \"77313252-ffb7-4283-8c2b-b0b9d51ce480\") " Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.828141 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume" (OuterVolumeSpecName: "config-volume") pod "77313252-ffb7-4283-8c2b-b0b9d51ce480" (UID: "77313252-ffb7-4283-8c2b-b0b9d51ce480"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.833271 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t" (OuterVolumeSpecName: "kube-api-access-6gb9t") pod "77313252-ffb7-4283-8c2b-b0b9d51ce480" (UID: "77313252-ffb7-4283-8c2b-b0b9d51ce480"). InnerVolumeSpecName "kube-api-access-6gb9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.833590 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "77313252-ffb7-4283-8c2b-b0b9d51ce480" (UID: "77313252-ffb7-4283-8c2b-b0b9d51ce480"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.928966 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77313252-ffb7-4283-8c2b-b0b9d51ce480-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.929001 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gb9t\" (UniqueName: \"kubernetes.io/projected/77313252-ffb7-4283-8c2b-b0b9d51ce480-kube-api-access-6gb9t\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:02 crc kubenswrapper[4756]: I0930 20:15:02.929012 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77313252-ffb7-4283-8c2b-b0b9d51ce480-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:03 crc kubenswrapper[4756]: I0930 20:15:03.387953 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" event={"ID":"77313252-ffb7-4283-8c2b-b0b9d51ce480","Type":"ContainerDied","Data":"72989b375dea083de251ef7eb17c2934df693d47a6ef8db057a68f4c0109657c"} Sep 30 20:15:03 crc kubenswrapper[4756]: I0930 20:15:03.388281 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72989b375dea083de251ef7eb17c2934df693d47a6ef8db057a68f4c0109657c" Sep 30 20:15:03 crc kubenswrapper[4756]: I0930 20:15:03.388006 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4" Sep 30 20:15:03 crc kubenswrapper[4756]: I0930 20:15:03.730363 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj"] Sep 30 20:15:03 crc kubenswrapper[4756]: I0930 20:15:03.734554 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321010-snrpj"] Sep 30 20:15:05 crc kubenswrapper[4756]: I0930 20:15:05.131222 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7f9fe4-0797-4490-8bc2-c48850eb9f02" path="/var/lib/kubelet/pods/4f7f9fe4-0797-4490-8bc2-c48850eb9f02/volumes" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.584343 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:17 crc kubenswrapper[4756]: E0930 20:15:17.585243 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77313252-ffb7-4283-8c2b-b0b9d51ce480" containerName="collect-profiles" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.585258 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="77313252-ffb7-4283-8c2b-b0b9d51ce480" containerName="collect-profiles" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.585426 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="77313252-ffb7-4283-8c2b-b0b9d51ce480" containerName="collect-profiles" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.586455 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.611011 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.729579 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.729635 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqzfd\" (UniqueName: \"kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.729718 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.831100 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.831215 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.831256 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqzfd\" (UniqueName: \"kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.831767 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.831781 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.850584 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kqzfd\" (UniqueName: \"kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd\") pod \"redhat-operators-rdnft\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:17 crc kubenswrapper[4756]: I0930 20:15:17.905696 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:18 crc kubenswrapper[4756]: I0930 20:15:18.325671 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:18 crc kubenswrapper[4756]: I0930 20:15:18.510349 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerStarted","Data":"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199"} Sep 30 20:15:18 crc kubenswrapper[4756]: I0930 20:15:18.510658 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerStarted","Data":"a8b36cebddb4b32f647600cd8ab3198c4c30887ded485a0b73709a81936cb3b3"} Sep 30 20:15:19 crc kubenswrapper[4756]: I0930 20:15:19.023210 4756 scope.go:117] "RemoveContainer" containerID="a8c5ebe8c2ce3631ae5ba6c5403872e352b1e5e30e3c6977ceaa595219d3b3ac" Sep 30 20:15:19 crc kubenswrapper[4756]: I0930 20:15:19.519155 4756 generic.go:334] "Generic (PLEG): container finished" podID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerID="d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199" exitCode=0 Sep 30 20:15:19 crc kubenswrapper[4756]: I0930 20:15:19.519281 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerDied","Data":"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199"} Sep 30 20:15:19 crc kubenswrapper[4756]: I0930 20:15:19.520809 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:15:21 crc kubenswrapper[4756]: I0930 20:15:21.534279 4756 generic.go:334] "Generic (PLEG): container finished" podID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerID="965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153" exitCode=0 Sep 30 20:15:21 crc kubenswrapper[4756]: I0930 20:15:21.534335 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerDied","Data":"965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153"} Sep 30 20:15:22 crc kubenswrapper[4756]: I0930 20:15:22.550056 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerStarted","Data":"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec"} Sep 30 20:15:22 crc kubenswrapper[4756]: I0930 20:15:22.574326 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rdnft" podStartSLOduration=3.06515084 podStartE2EDuration="5.574310627s" podCreationTimestamp="2025-09-30 20:15:17 +0000 UTC" firstStartedPulling="2025-09-30 20:15:19.520508676 +0000 UTC m=+2649.141442143" lastFinishedPulling="2025-09-30 20:15:22.029668423 +0000 UTC m=+2651.650601930" 
observedRunningTime="2025-09-30 20:15:22.572646284 +0000 UTC m=+2652.193579761" watchObservedRunningTime="2025-09-30 20:15:22.574310627 +0000 UTC m=+2652.195244114" Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.766435 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.768322 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.788280 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.956979 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkz4k\" (UniqueName: \"kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.957061 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:23 crc kubenswrapper[4756]: I0930 20:15:23.957312 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.058300 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.058368 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkz4k\" (UniqueName: \"kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.058430 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.058919 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 
20:15:24.058992 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.076866 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkz4k\" (UniqueName: \"kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k\") pod \"redhat-marketplace-dpvs6\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.085778 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:24 crc kubenswrapper[4756]: I0930 20:15:24.576773 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:24 crc kubenswrapper[4756]: W0930 20:15:24.577560 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94a4e007_3242_4c52_bba3_73c5e2259504.slice/crio-53c76b8ea4e47091f188a94ef6335dee7c0db98f933e85f20cf228c90c4c700d WatchSource:0}: Error finding container 53c76b8ea4e47091f188a94ef6335dee7c0db98f933e85f20cf228c90c4c700d: Status 404 returned error can't find the container with id 53c76b8ea4e47091f188a94ef6335dee7c0db98f933e85f20cf228c90c4c700d Sep 30 20:15:25 crc kubenswrapper[4756]: I0930 20:15:25.571080 4756 generic.go:334] "Generic (PLEG): container finished" podID="94a4e007-3242-4c52-bba3-73c5e2259504" containerID="86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478" exitCode=0 Sep 30 20:15:25 crc kubenswrapper[4756]: I0930 20:15:25.571116 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerDied","Data":"86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478"} Sep 30 20:15:25 crc kubenswrapper[4756]: I0930 20:15:25.571140 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerStarted","Data":"53c76b8ea4e47091f188a94ef6335dee7c0db98f933e85f20cf228c90c4c700d"} Sep 30 20:15:26 crc kubenswrapper[4756]: I0930 20:15:26.579723 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerStarted","Data":"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978"} Sep 30 20:15:27 crc kubenswrapper[4756]: I0930 20:15:27.592585 4756 generic.go:334] "Generic (PLEG): container finished" podID="94a4e007-3242-4c52-bba3-73c5e2259504" containerID="e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978" exitCode=0 Sep 30 20:15:27 crc kubenswrapper[4756]: I0930 20:15:27.592650 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerDied","Data":"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978"} Sep 30 20:15:27 crc kubenswrapper[4756]: I0930 20:15:27.906311 4756 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:27 crc kubenswrapper[4756]: I0930 20:15:27.906657 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:27 crc kubenswrapper[4756]: I0930 20:15:27.949952 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:28 crc kubenswrapper[4756]: I0930 20:15:28.602808 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerStarted","Data":"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1"} Sep 30 20:15:28 crc kubenswrapper[4756]: I0930 20:15:28.634527 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dpvs6" podStartSLOduration=2.9559491810000003 podStartE2EDuration="5.634506012s" podCreationTimestamp="2025-09-30 20:15:23 +0000 UTC" firstStartedPulling="2025-09-30 20:15:25.572602659 +0000 UTC m=+2655.193536136" lastFinishedPulling="2025-09-30 20:15:28.25115949 +0000 UTC m=+2657.872092967" observedRunningTime="2025-09-30 20:15:28.629804449 +0000 UTC m=+2658.250737926" watchObservedRunningTime="2025-09-30 20:15:28.634506012 +0000 UTC m=+2658.255439529" Sep 30 20:15:28 crc kubenswrapper[4756]: I0930 20:15:28.640661 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:30 crc kubenswrapper[4756]: I0930 20:15:30.366294 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:30 crc kubenswrapper[4756]: I0930 20:15:30.621917 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rdnft" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="registry-server" containerID="cri-o://b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec" gracePeriod=2 Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.064721 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.170094 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqzfd\" (UniqueName: \"kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd\") pod \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.170157 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities\") pod \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.170193 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content\") pod \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\" (UID: \"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9\") " Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.171996 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities" (OuterVolumeSpecName: "utilities") pod "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" (UID: "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.181526 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd" (OuterVolumeSpecName: "kube-api-access-kqzfd") pod "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" (UID: "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9"). InnerVolumeSpecName "kube-api-access-kqzfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.271269 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqzfd\" (UniqueName: \"kubernetes.io/projected/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-kube-api-access-kqzfd\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.271573 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.282259 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" (UID: "c1ce8b72-3f07-4800-bbed-8a2cca80fdd9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.373420 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.638310 4756 generic.go:334] "Generic (PLEG): container finished" podID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerID="b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec" exitCode=0 Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.638385 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerDied","Data":"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec"} Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.638457 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rdnft" event={"ID":"c1ce8b72-3f07-4800-bbed-8a2cca80fdd9","Type":"ContainerDied","Data":"a8b36cebddb4b32f647600cd8ab3198c4c30887ded485a0b73709a81936cb3b3"} Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.638476 4756 scope.go:117] "RemoveContainer" containerID="b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.638562 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rdnft" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.665868 4756 scope.go:117] "RemoveContainer" containerID="965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.689638 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.693051 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rdnft"] Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.709730 4756 scope.go:117] "RemoveContainer" containerID="d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.751787 4756 scope.go:117] "RemoveContainer" containerID="b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec" Sep 30 20:15:31 crc kubenswrapper[4756]: E0930 20:15:31.752416 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec\": container with ID starting with b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec not found: ID does not exist" containerID="b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.752487 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec"} err="failed to get container status \"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec\": rpc error: code = NotFound desc = could not find container \"b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec\": container with ID starting with b51b5bdcbac1ffc31c1c192dae7ea0ab10956b7b3c8aa1dd569215e8444fdfec not found: ID does not exist" Sep 30 20:15:31 crc 
kubenswrapper[4756]: I0930 20:15:31.752525 4756 scope.go:117] "RemoveContainer" containerID="965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153" Sep 30 20:15:31 crc kubenswrapper[4756]: E0930 20:15:31.753144 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153\": container with ID starting with 965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153 not found: ID does not exist" containerID="965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.753234 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153"} err="failed to get container status \"965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153\": rpc error: code = NotFound desc = could not find container \"965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153\": container with ID starting with 965a25194f74d0876e9a600da935f5a1074ae7e280a224661fe443ef8faf4153 not found: ID does not exist" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.753291 4756 scope.go:117] "RemoveContainer" containerID="d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199" Sep 30 20:15:31 crc kubenswrapper[4756]: E0930 20:15:31.753780 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199\": container with ID starting with d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199 not found: ID does not exist" containerID="d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199" Sep 30 20:15:31 crc kubenswrapper[4756]: I0930 20:15:31.753838 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199"} err="failed to get container status \"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199\": rpc error: code = NotFound desc = could not find container \"d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199\": container with ID starting with d7f96ca1ddaf606573059b225b9066f41e8f9f43b12d7fe988d453210e92c199 not found: ID does not exist" Sep 30 20:15:33 crc kubenswrapper[4756]: I0930 20:15:33.134287 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" path="/var/lib/kubelet/pods/c1ce8b72-3f07-4800-bbed-8a2cca80fdd9/volumes" Sep 30 20:15:34 crc kubenswrapper[4756]: I0930 20:15:34.087042 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:34 crc kubenswrapper[4756]: I0930 20:15:34.087098 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:34 crc kubenswrapper[4756]: I0930 20:15:34.126204 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:34 crc kubenswrapper[4756]: I0930 20:15:34.730426 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:35 crc kubenswrapper[4756]: I0930 20:15:35.755371 4756 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:36 crc kubenswrapper[4756]: I0930 20:15:36.688110 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dpvs6" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="registry-server" containerID="cri-o://2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1" gracePeriod=2 Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.185336 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.373275 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content\") pod \"94a4e007-3242-4c52-bba3-73c5e2259504\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.373382 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities\") pod \"94a4e007-3242-4c52-bba3-73c5e2259504\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.373450 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkz4k\" (UniqueName: \"kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k\") pod \"94a4e007-3242-4c52-bba3-73c5e2259504\" (UID: \"94a4e007-3242-4c52-bba3-73c5e2259504\") " Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.374465 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities" (OuterVolumeSpecName: "utilities") pod "94a4e007-3242-4c52-bba3-73c5e2259504" (UID: "94a4e007-3242-4c52-bba3-73c5e2259504"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.380882 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k" (OuterVolumeSpecName: "kube-api-access-zkz4k") pod "94a4e007-3242-4c52-bba3-73c5e2259504" (UID: "94a4e007-3242-4c52-bba3-73c5e2259504"). InnerVolumeSpecName "kube-api-access-zkz4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.394003 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94a4e007-3242-4c52-bba3-73c5e2259504" (UID: "94a4e007-3242-4c52-bba3-73c5e2259504"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.475065 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.475099 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94a4e007-3242-4c52-bba3-73c5e2259504-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.475112 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkz4k\" (UniqueName: \"kubernetes.io/projected/94a4e007-3242-4c52-bba3-73c5e2259504-kube-api-access-zkz4k\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.702504 4756 generic.go:334] "Generic (PLEG): container finished" podID="94a4e007-3242-4c52-bba3-73c5e2259504" containerID="2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1" exitCode=0 Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.702578 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerDied","Data":"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1"} Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.702655 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dpvs6" event={"ID":"94a4e007-3242-4c52-bba3-73c5e2259504","Type":"ContainerDied","Data":"53c76b8ea4e47091f188a94ef6335dee7c0db98f933e85f20cf228c90c4c700d"} Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.702686 4756 scope.go:117] "RemoveContainer" containerID="2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.702602 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dpvs6" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.730922 4756 scope.go:117] "RemoveContainer" containerID="e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.766185 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.776900 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dpvs6"] Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.777614 4756 scope.go:117] "RemoveContainer" containerID="86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.804863 4756 scope.go:117] "RemoveContainer" containerID="2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1" Sep 30 20:15:37 crc kubenswrapper[4756]: E0930 20:15:37.805574 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1\": container with ID starting with 2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1 not found: ID does not exist" containerID="2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.805616 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1"} err="failed to get container status \"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1\": rpc error: code = NotFound desc = could not find container \"2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1\": container with ID starting with 2395feb0cdaa6a4789b2fb2f571615606c875f043c5f878d9233a8ce501a4ec1 not found: ID does not exist" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.805643 4756 scope.go:117] "RemoveContainer" containerID="e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978" Sep 30 20:15:37 crc kubenswrapper[4756]: E0930 20:15:37.806202 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978\": container with ID starting with e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978 not found: ID does not exist" containerID="e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.806228 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978"} err="failed to get container status \"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978\": rpc error: code = NotFound desc = could not find container \"e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978\": container with ID starting with e0ae4d3d0670cd6a26e43c41e22ca6f7ab635a841ad0c2d5ae44cfcecd50b978 not found: ID does not exist" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.806269 4756 scope.go:117] "RemoveContainer" containerID="86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478" Sep 30 20:15:37 crc kubenswrapper[4756]: E0930 20:15:37.806937 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478\": container with ID starting with 86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478 not found: ID does not exist" containerID="86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478" Sep 30 20:15:37 crc kubenswrapper[4756]: I0930 20:15:37.807011 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478"} err="failed to get container status \"86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478\": rpc error: code = NotFound desc = could not find container \"86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478\": container with ID starting with 86890d0a6400f8c9af28156d706766dddc79626ad3e00bf5e4c17a299a739478 not found: ID does not exist" Sep 30 20:15:39 crc kubenswrapper[4756]: I0930 20:15:39.125263 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" path="/var/lib/kubelet/pods/94a4e007-3242-4c52-bba3-73c5e2259504/volumes" Sep 30 20:16:11 crc kubenswrapper[4756]: I0930 20:16:11.539309 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:16:11 crc kubenswrapper[4756]: I0930 20:16:11.540079 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:16:41 crc kubenswrapper[4756]: I0930 20:16:41.539014 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:16:41 crc kubenswrapper[4756]: I0930 20:16:41.540123 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.416146 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417451 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417481 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417528 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="extract-content" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417544 4756 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="extract-content" Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417572 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="extract-content" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417586 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="extract-content" Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417600 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="extract-utilities" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417614 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="extract-utilities" Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417646 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="extract-utilities" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417659 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="extract-utilities" Sep 30 20:16:55 crc kubenswrapper[4756]: E0930 20:16:55.417686 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417698 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.417993 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="94a4e007-3242-4c52-bba3-73c5e2259504" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.418021 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ce8b72-3f07-4800-bbed-8a2cca80fdd9" containerName="registry-server" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.420600 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.434171 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.538026 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n85ds\" (UniqueName: \"kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.538127 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.538231 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.639223 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n85ds\" (UniqueName: \"kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.639896 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.640051 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.640591 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.640588 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.658234 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n85ds\" (UniqueName: \"kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds\") pod \"community-operators-cg2nt\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:55 crc kubenswrapper[4756]: I0930 20:16:55.744762 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:16:56 crc kubenswrapper[4756]: I0930 20:16:56.109022 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:16:56 crc kubenswrapper[4756]: I0930 20:16:56.337704 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerID="7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1" exitCode=0 Sep 30 20:16:56 crc kubenswrapper[4756]: I0930 20:16:56.337750 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerDied","Data":"7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1"} Sep 30 20:16:56 crc kubenswrapper[4756]: I0930 20:16:56.337781 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerStarted","Data":"0f67a2f09bbce72f24d44761c4a3a02314de1ee96e9552d9ab67e557f09f1a28"} Sep 30 20:16:57 crc kubenswrapper[4756]: I0930 20:16:57.345835 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerStarted","Data":"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b"} Sep 30 20:16:58 crc kubenswrapper[4756]: I0930 20:16:58.359521 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerID="199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b" exitCode=0 Sep 30 20:16:58 crc kubenswrapper[4756]: I0930 20:16:58.359583 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerDied","Data":"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b"} Sep 30 20:16:59 crc kubenswrapper[4756]: I0930 20:16:59.372370 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerStarted","Data":"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731"} Sep 30 20:16:59 crc kubenswrapper[4756]: I0930 20:16:59.402470 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cg2nt" podStartSLOduration=1.871960916 podStartE2EDuration="4.402450001s" podCreationTimestamp="2025-09-30 20:16:55 +0000 UTC" firstStartedPulling="2025-09-30 20:16:56.33909734 +0000 UTC m=+2745.960030817" lastFinishedPulling="2025-09-30 20:16:58.869586425 +0000 UTC m=+2748.490519902" observedRunningTime="2025-09-30 20:16:59.394073912 +0000 UTC m=+2749.015007409" watchObservedRunningTime="2025-09-30 20:16:59.402450001 +0000 UTC m=+2749.023383488" Sep 30 20:17:05 crc kubenswrapper[4756]: I0930 20:17:05.745558 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:05 crc kubenswrapper[4756]: I0930 20:17:05.747545 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:05 crc kubenswrapper[4756]: I0930 20:17:05.792841 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:06 crc kubenswrapper[4756]: I0930 20:17:06.488767 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:06 crc kubenswrapper[4756]: I0930 20:17:06.543918 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:17:08 crc kubenswrapper[4756]: I0930 20:17:08.452524 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cg2nt" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="registry-server" containerID="cri-o://b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731" gracePeriod=2 Sep 30 20:17:08 crc kubenswrapper[4756]: I0930 20:17:08.898176 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.069175 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content\") pod \"ccb81dd1-6037-43db-8940-6e97db2614c2\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.069296 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities\") pod \"ccb81dd1-6037-43db-8940-6e97db2614c2\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.070152 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities" (OuterVolumeSpecName: "utilities") pod "ccb81dd1-6037-43db-8940-6e97db2614c2" (UID: "ccb81dd1-6037-43db-8940-6e97db2614c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.070259 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n85ds\" (UniqueName: \"kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds\") pod \"ccb81dd1-6037-43db-8940-6e97db2614c2\" (UID: \"ccb81dd1-6037-43db-8940-6e97db2614c2\") " Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.071142 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.075554 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds" (OuterVolumeSpecName: "kube-api-access-n85ds") pod "ccb81dd1-6037-43db-8940-6e97db2614c2" (UID: "ccb81dd1-6037-43db-8940-6e97db2614c2"). InnerVolumeSpecName "kube-api-access-n85ds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.118852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccb81dd1-6037-43db-8940-6e97db2614c2" (UID: "ccb81dd1-6037-43db-8940-6e97db2614c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.172466 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccb81dd1-6037-43db-8940-6e97db2614c2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.172491 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n85ds\" (UniqueName: \"kubernetes.io/projected/ccb81dd1-6037-43db-8940-6e97db2614c2-kube-api-access-n85ds\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.461677 4756 generic.go:334] "Generic (PLEG): container finished" podID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerID="b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731" exitCode=0 Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.461726 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerDied","Data":"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731"} Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.461763 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg2nt" event={"ID":"ccb81dd1-6037-43db-8940-6e97db2614c2","Type":"ContainerDied","Data":"0f67a2f09bbce72f24d44761c4a3a02314de1ee96e9552d9ab67e557f09f1a28"} Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.461781 4756 scope.go:117] "RemoveContainer" containerID="b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.461966 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cg2nt" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.495843 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.499485 4756 scope.go:117] "RemoveContainer" containerID="199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.502353 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cg2nt"] Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.517568 4756 scope.go:117] "RemoveContainer" containerID="7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.549643 4756 scope.go:117] "RemoveContainer" containerID="b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731" Sep 30 20:17:09 crc kubenswrapper[4756]: E0930 20:17:09.550087 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731\": container with ID starting with b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731 not found: ID does not exist" containerID="b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.550130 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731"} err="failed to get container status \"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731\": rpc error: code = NotFound desc = could not find container \"b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731\": container with ID starting with b9c2584fd83190fddb5445f2744ed65aaf99a6c0ab3589094a44eb60e0cba731 not found: ID does not exist" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.550160 4756 scope.go:117] "RemoveContainer" containerID="199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b" Sep 30 20:17:09 crc kubenswrapper[4756]: E0930 20:17:09.550511 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b\": container with ID starting with 199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b not found: ID does not exist" containerID="199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.550564 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b"} err="failed to get container status \"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b\": rpc error: code = NotFound desc = could not find container \"199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b\": container with ID starting with 199774cbc2b5a45fe147941fec41c0c5657041686932acf9a666fb667a58800b not found: ID does not exist" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.550597 4756 scope.go:117] "RemoveContainer" containerID="7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1" Sep 30 20:17:09 crc kubenswrapper[4756]: E0930 20:17:09.550856 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1\": container with ID starting with 7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1 not found: ID does not exist" containerID="7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1" Sep 30 20:17:09 crc kubenswrapper[4756]: I0930 20:17:09.550882 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1"} err="failed to get container status \"7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1\": rpc error: code = NotFound desc = could not find container \"7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1\": container with ID starting with 7c803b4848fe7b83048d2619bfe75e9f2fce2611a8f4d9c236773273dd5d7ca1 not found: ID does not exist" Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.134462 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" path="/var/lib/kubelet/pods/ccb81dd1-6037-43db-8940-6e97db2614c2/volumes" Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.539744 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.539883 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.540167 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.541570 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:17:11 crc kubenswrapper[4756]: I0930 20:17:11.541680 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b" gracePeriod=600 Sep 30 20:17:12 crc kubenswrapper[4756]: I0930 20:17:12.495054 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b" exitCode=0 Sep 30 20:17:12 crc kubenswrapper[4756]: I0930 20:17:12.496500 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" 
event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b"} Sep 30 20:17:12 crc kubenswrapper[4756]: I0930 20:17:12.496541 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154"} Sep 30 20:17:12 crc kubenswrapper[4756]: I0930 20:17:12.496563 4756 scope.go:117] "RemoveContainer" containerID="59f9f1c1ce4b55e108410080ce02b6db07852e4e8a303b3fe63834e86270c4f2" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.114019 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:45 crc kubenswrapper[4756]: E0930 20:17:45.114930 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="extract-content" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.114948 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="extract-content" Sep 30 20:17:45 crc kubenswrapper[4756]: E0930 20:17:45.114980 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="extract-utilities" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.114990 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="extract-utilities" Sep 30 20:17:45 crc kubenswrapper[4756]: E0930 20:17:45.115004 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="registry-server" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.115014 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="registry-server" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.115332 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb81dd1-6037-43db-8940-6e97db2614c2" containerName="registry-server" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.116762 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.137221 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.221973 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.222023 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt24m\" (UniqueName: \"kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.222094 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.322898 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt24m\" (UniqueName: \"kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.322967 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.323085 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.323458 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.323559 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.341779 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tt24m\" (UniqueName: \"kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m\") pod \"certified-operators-xxr5p\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.481993 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:45 crc kubenswrapper[4756]: I0930 20:17:45.906328 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:46 crc kubenswrapper[4756]: I0930 20:17:46.780654 4756 generic.go:334] "Generic (PLEG): container finished" podID="bbad9789-8bb3-49af-a69c-63126be10862" containerID="f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05" exitCode=0 Sep 30 20:17:46 crc kubenswrapper[4756]: I0930 20:17:46.780756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerDied","Data":"f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05"} Sep 30 20:17:46 crc kubenswrapper[4756]: I0930 20:17:46.781095 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerStarted","Data":"d31dec5f8be7106cfac8adfb39d3f6f175cc1748e3eef943c49677b7af1b7206"} Sep 30 20:17:47 crc kubenswrapper[4756]: I0930 20:17:47.796743 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerStarted","Data":"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d"} Sep 30 20:17:48 crc kubenswrapper[4756]: I0930 20:17:48.808183 4756 generic.go:334] "Generic (PLEG): container finished" podID="bbad9789-8bb3-49af-a69c-63126be10862" containerID="29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d" exitCode=0 Sep 30 20:17:48 crc kubenswrapper[4756]: I0930 20:17:48.808225 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerDied","Data":"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d"} Sep 30 20:17:49 crc kubenswrapper[4756]: I0930 20:17:49.819595 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerStarted","Data":"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496"} Sep 30 20:17:49 crc kubenswrapper[4756]: I0930 20:17:49.847102 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xxr5p" podStartSLOduration=2.378426069 podStartE2EDuration="4.847072758s" podCreationTimestamp="2025-09-30 20:17:45 +0000 UTC" firstStartedPulling="2025-09-30 20:17:46.783741188 +0000 UTC m=+2796.404674695" lastFinishedPulling="2025-09-30 20:17:49.252387887 +0000 UTC m=+2798.873321384" observedRunningTime="2025-09-30 20:17:49.844018088 +0000 UTC m=+2799.464951625" watchObservedRunningTime="2025-09-30 20:17:49.847072758 +0000 UTC m=+2799.468006275" Sep 30 20:17:55 crc kubenswrapper[4756]: I0930 20:17:55.483221 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:55 crc kubenswrapper[4756]: I0930 20:17:55.483811 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:55 crc kubenswrapper[4756]: I0930 20:17:55.562918 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:55 crc kubenswrapper[4756]: I0930 20:17:55.931464 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:55 crc kubenswrapper[4756]: I0930 20:17:55.997029 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:57 crc kubenswrapper[4756]: I0930 20:17:57.899552 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xxr5p" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="registry-server" containerID="cri-o://717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496" gracePeriod=2 Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.340466 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.450700 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content\") pod \"bbad9789-8bb3-49af-a69c-63126be10862\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.450777 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt24m\" (UniqueName: \"kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m\") pod \"bbad9789-8bb3-49af-a69c-63126be10862\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.450928 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities\") pod \"bbad9789-8bb3-49af-a69c-63126be10862\" (UID: \"bbad9789-8bb3-49af-a69c-63126be10862\") " Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.452698 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities" (OuterVolumeSpecName: "utilities") pod "bbad9789-8bb3-49af-a69c-63126be10862" (UID: "bbad9789-8bb3-49af-a69c-63126be10862"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.460203 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m" (OuterVolumeSpecName: "kube-api-access-tt24m") pod "bbad9789-8bb3-49af-a69c-63126be10862" (UID: "bbad9789-8bb3-49af-a69c-63126be10862"). InnerVolumeSpecName "kube-api-access-tt24m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.504609 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bbad9789-8bb3-49af-a69c-63126be10862" (UID: "bbad9789-8bb3-49af-a69c-63126be10862"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.553456 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.553517 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbad9789-8bb3-49af-a69c-63126be10862-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.553546 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt24m\" (UniqueName: \"kubernetes.io/projected/bbad9789-8bb3-49af-a69c-63126be10862-kube-api-access-tt24m\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.909998 4756 generic.go:334] "Generic (PLEG): container finished" podID="bbad9789-8bb3-49af-a69c-63126be10862" containerID="717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496" exitCode=0 Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.910050 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerDied","Data":"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496"} Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.910074 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xxr5p" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.910080 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxr5p" event={"ID":"bbad9789-8bb3-49af-a69c-63126be10862","Type":"ContainerDied","Data":"d31dec5f8be7106cfac8adfb39d3f6f175cc1748e3eef943c49677b7af1b7206"} Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.910093 4756 scope.go:117] "RemoveContainer" containerID="717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.930550 4756 scope.go:117] "RemoveContainer" containerID="29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d" Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.953215 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.964046 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xxr5p"] Sep 30 20:17:58 crc kubenswrapper[4756]: I0930 20:17:58.969330 4756 scope.go:117] "RemoveContainer" containerID="f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.000709 4756 scope.go:117] "RemoveContainer" containerID="717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496" Sep 30 20:17:59 crc kubenswrapper[4756]: E0930 20:17:59.001226 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496\": container with ID starting with 717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496 not found: ID does not exist" containerID="717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.001268 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496"} err="failed to get container status \"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496\": rpc error: code = NotFound desc = could not find container \"717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496\": container with ID starting with 717a2ea555c8923fd27f23646705ad34f34e7a7194687dcefd29998b0a836496 not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.001327 4756 scope.go:117] "RemoveContainer" containerID="29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d" Sep 30 20:17:59 crc kubenswrapper[4756]: E0930 20:17:59.001699 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d\": container with ID starting with 29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d not found: ID does not exist" containerID="29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.001790 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d"} err="failed to get container status \"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d\": rpc error: code = NotFound desc = could not find 
container \"29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d\": container with ID starting with 29bff6c7a62ab46c029eb465c0e78f56e989d9bf4c86c32f6f09230e944d3e3d not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.001828 4756 scope.go:117] "RemoveContainer" containerID="f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05" Sep 30 20:17:59 crc kubenswrapper[4756]: E0930 20:17:59.002190 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05\": container with ID starting with f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05 not found: ID does not exist" containerID="f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.002239 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05"} err="failed to get container status \"f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05\": rpc error: code = NotFound desc = could not find container \"f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05\": container with ID starting with f806f0acb5dbf0ab03d30d6c946ec6291c3ed5c7617e1e65451180311788cd05 not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4756]: I0930 20:17:59.124195 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbad9789-8bb3-49af-a69c-63126be10862" path="/var/lib/kubelet/pods/bbad9789-8bb3-49af-a69c-63126be10862/volumes" Sep 30 20:19:11 crc kubenswrapper[4756]: I0930 20:19:11.538722 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:11 crc kubenswrapper[4756]: I0930 20:19:11.539443 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:19:41 crc kubenswrapper[4756]: I0930 20:19:41.538785 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:41 crc kubenswrapper[4756]: I0930 20:19:41.539663 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:20:11 crc kubenswrapper[4756]: I0930 20:20:11.538725 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 
20:20:11 crc kubenswrapper[4756]: I0930 20:20:11.539556 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:20:11 crc kubenswrapper[4756]: I0930 20:20:11.539755 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:20:11 crc kubenswrapper[4756]: I0930 20:20:11.540531 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:20:11 crc kubenswrapper[4756]: I0930 20:20:11.540699 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" gracePeriod=600 Sep 30 20:20:11 crc kubenswrapper[4756]: E0930 20:20:11.666665 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:20:12 crc kubenswrapper[4756]: I0930 20:20:12.032374 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" exitCode=0 Sep 30 20:20:12 crc kubenswrapper[4756]: I0930 20:20:12.032432 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154"} Sep 30 20:20:12 crc kubenswrapper[4756]: I0930 20:20:12.032461 4756 scope.go:117] "RemoveContainer" containerID="877567a59fb1cb2d3ae36ddaf918e59f424d07882ee0480b1ac7bde68db72d5b" Sep 30 20:20:12 crc kubenswrapper[4756]: I0930 20:20:12.032923 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:20:12 crc kubenswrapper[4756]: E0930 20:20:12.033123 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:20:23 crc kubenswrapper[4756]: I0930 20:20:23.115925 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:20:23 crc 
kubenswrapper[4756]: E0930 20:20:23.116962 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:20:36 crc kubenswrapper[4756]: I0930 20:20:36.115995 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:20:36 crc kubenswrapper[4756]: E0930 20:20:36.116772 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:20:49 crc kubenswrapper[4756]: I0930 20:20:49.116079 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:20:49 crc kubenswrapper[4756]: E0930 20:20:49.116841 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:21:03 crc kubenswrapper[4756]: I0930 20:21:03.116379 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:21:03 crc kubenswrapper[4756]: E0930 20:21:03.117269 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:21:15 crc kubenswrapper[4756]: I0930 20:21:15.116532 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:21:15 crc kubenswrapper[4756]: E0930 20:21:15.117388 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:21:29 crc kubenswrapper[4756]: I0930 20:21:29.116575 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:21:29 crc kubenswrapper[4756]: E0930 20:21:29.117872 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:21:42 crc kubenswrapper[4756]: I0930 20:21:42.115921 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:21:42 crc kubenswrapper[4756]: E0930 20:21:42.117663 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:21:56 crc kubenswrapper[4756]: I0930 20:21:56.116593 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:21:56 crc kubenswrapper[4756]: E0930 20:21:56.117955 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:22:08 crc kubenswrapper[4756]: I0930 20:22:08.115958 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:22:08 crc kubenswrapper[4756]: E0930 20:22:08.117252 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:22:20 crc kubenswrapper[4756]: I0930 20:22:20.115771 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:22:20 crc kubenswrapper[4756]: E0930 20:22:20.116474 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:22:32 crc kubenswrapper[4756]: I0930 20:22:32.116470 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:22:32 crc kubenswrapper[4756]: E0930 20:22:32.117118 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:22:47 crc kubenswrapper[4756]: I0930 20:22:47.115519 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:22:47 crc kubenswrapper[4756]: E0930 20:22:47.116135 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:23:00 crc kubenswrapper[4756]: I0930 20:23:00.116355 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:23:00 crc kubenswrapper[4756]: E0930 20:23:00.117099 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:23:11 crc kubenswrapper[4756]: I0930 20:23:11.120026 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:23:11 crc kubenswrapper[4756]: E0930 20:23:11.121350 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:23:25 crc kubenswrapper[4756]: I0930 20:23:25.116194 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:23:25 crc kubenswrapper[4756]: E0930 20:23:25.117045 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:23:38 crc kubenswrapper[4756]: I0930 20:23:38.115972 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:23:38 crc kubenswrapper[4756]: E0930 20:23:38.116796 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:23:52 crc kubenswrapper[4756]: I0930 20:23:52.117586 4756 
scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:23:52 crc kubenswrapper[4756]: E0930 20:23:52.118630 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:24:05 crc kubenswrapper[4756]: I0930 20:24:05.116673 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:24:05 crc kubenswrapper[4756]: E0930 20:24:05.117236 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:24:19 crc kubenswrapper[4756]: I0930 20:24:19.116183 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:24:19 crc kubenswrapper[4756]: E0930 20:24:19.117213 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:24:30 crc kubenswrapper[4756]: I0930 20:24:30.115854 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:24:30 crc kubenswrapper[4756]: E0930 20:24:30.117136 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:24:41 crc kubenswrapper[4756]: I0930 20:24:41.119280 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:24:41 crc kubenswrapper[4756]: E0930 20:24:41.120113 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:24:52 crc kubenswrapper[4756]: I0930 20:24:52.117083 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:24:52 crc kubenswrapper[4756]: E0930 20:24:52.117722 4756 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:25:06 crc kubenswrapper[4756]: I0930 20:25:06.116448 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:25:06 crc kubenswrapper[4756]: E0930 20:25:06.117332 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:25:18 crc kubenswrapper[4756]: I0930 20:25:18.116682 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:25:18 crc kubenswrapper[4756]: I0930 20:25:18.465300 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3"} Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.960086 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:25:44 crc kubenswrapper[4756]: E0930 20:25:44.960768 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="extract-content" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.960780 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="extract-content" Sep 30 20:25:44 crc kubenswrapper[4756]: E0930 20:25:44.960789 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="extract-utilities" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.960795 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="extract-utilities" Sep 30 20:25:44 crc kubenswrapper[4756]: E0930 20:25:44.960820 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="registry-server" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.960826 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="registry-server" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.960963 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbad9789-8bb3-49af-a69c-63126be10862" containerName="registry-server" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.963560 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:44 crc kubenswrapper[4756]: I0930 20:25:44.989388 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.059886 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.059938 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt55j\" (UniqueName: \"kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.060078 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.161969 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.162021 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt55j\" (UniqueName: \"kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.162116 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.162597 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.162639 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.181510 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bt55j\" (UniqueName: \"kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j\") pod \"redhat-operators-j2l9t\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.286922 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.706378 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:25:45 crc kubenswrapper[4756]: I0930 20:25:45.732636 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerStarted","Data":"7d395b5259f0cd75c7af7498be871f2c18ecb8cca15a37b39387d64dec80dea9"} Sep 30 20:25:46 crc kubenswrapper[4756]: I0930 20:25:46.741297 4756 generic.go:334] "Generic (PLEG): container finished" podID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerID="5dc0ae852aefdca013c228b6165b5062180dc4a5dad9036b7cbb75781c3b82eb" exitCode=0 Sep 30 20:25:46 crc kubenswrapper[4756]: I0930 20:25:46.741340 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerDied","Data":"5dc0ae852aefdca013c228b6165b5062180dc4a5dad9036b7cbb75781c3b82eb"} Sep 30 20:25:46 crc kubenswrapper[4756]: I0930 20:25:46.744852 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:25:47 crc kubenswrapper[4756]: I0930 20:25:47.751904 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerStarted","Data":"3c20bb0e90828cc675e40eede7c3a55f22ad0e4f062537dfdde109e680b105e5"} Sep 30 20:25:48 crc kubenswrapper[4756]: I0930 20:25:48.760440 4756 generic.go:334] "Generic (PLEG): container finished" podID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerID="3c20bb0e90828cc675e40eede7c3a55f22ad0e4f062537dfdde109e680b105e5" exitCode=0 Sep 30 20:25:48 crc kubenswrapper[4756]: I0930 20:25:48.760481 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerDied","Data":"3c20bb0e90828cc675e40eede7c3a55f22ad0e4f062537dfdde109e680b105e5"} Sep 30 20:25:49 crc kubenswrapper[4756]: I0930 20:25:49.769709 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerStarted","Data":"842bb3b1227ba035b87037b776a86d87dbe53facb0b25fcd746ea9ad0b868025"} Sep 30 20:25:49 crc kubenswrapper[4756]: I0930 20:25:49.794782 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j2l9t" podStartSLOduration=3.375703609 podStartE2EDuration="5.794762788s" podCreationTimestamp="2025-09-30 20:25:44 +0000 UTC" firstStartedPulling="2025-09-30 20:25:46.744594448 +0000 UTC m=+3276.365527935" lastFinishedPulling="2025-09-30 20:25:49.163653627 +0000 UTC m=+3278.784587114" observedRunningTime="2025-09-30 20:25:49.788693869 +0000 UTC m=+3279.409627356" watchObservedRunningTime="2025-09-30 20:25:49.794762788 +0000 UTC m=+3279.415696275" Sep 30 20:25:53 crc 
kubenswrapper[4756]: I0930 20:25:53.745911 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.747813 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.758574 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.800111 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.800276 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.800305 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2j7x\" (UniqueName: \"kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.901845 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.901903 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2j7x\" (UniqueName: \"kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.901970 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.902388 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.902794 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:53 crc kubenswrapper[4756]: I0930 20:25:53.921175 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2j7x\" (UniqueName: \"kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x\") pod \"redhat-marketplace-n7tmp\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:54 crc kubenswrapper[4756]: I0930 20:25:54.069484 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:25:54 crc kubenswrapper[4756]: I0930 20:25:54.471914 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:25:54 crc kubenswrapper[4756]: I0930 20:25:54.807994 4756 generic.go:334] "Generic (PLEG): container finished" podID="674f247f-b227-49bc-95d1-df0c494472ee" containerID="3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326" exitCode=0 Sep 30 20:25:54 crc kubenswrapper[4756]: I0930 20:25:54.808076 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerDied","Data":"3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326"} Sep 30 20:25:54 crc kubenswrapper[4756]: I0930 20:25:54.808835 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerStarted","Data":"95c7979cb30d7763f08a834b498835ee4ec37a4a8c0e52506f0ab44e5aa3712d"} Sep 30 20:25:55 crc kubenswrapper[4756]: I0930 20:25:55.288094 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:55 crc kubenswrapper[4756]: I0930 20:25:55.288162 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:55 crc kubenswrapper[4756]: I0930 20:25:55.338888 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:55 crc kubenswrapper[4756]: I0930 20:25:55.867216 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:56 crc kubenswrapper[4756]: I0930 20:25:56.825200 4756 generic.go:334] "Generic (PLEG): container finished" podID="674f247f-b227-49bc-95d1-df0c494472ee" containerID="8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1" exitCode=0 Sep 30 20:25:56 crc kubenswrapper[4756]: I0930 20:25:56.825308 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerDied","Data":"8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1"} Sep 30 20:25:57 crc kubenswrapper[4756]: I0930 20:25:57.838168 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerStarted","Data":"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12"} Sep 30 20:25:58 crc 
kubenswrapper[4756]: I0930 20:25:58.135636 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n7tmp" podStartSLOduration=2.704760986 podStartE2EDuration="5.135620405s" podCreationTimestamp="2025-09-30 20:25:53 +0000 UTC" firstStartedPulling="2025-09-30 20:25:54.810198407 +0000 UTC m=+3284.431131904" lastFinishedPulling="2025-09-30 20:25:57.241057836 +0000 UTC m=+3286.861991323" observedRunningTime="2025-09-30 20:25:57.863231563 +0000 UTC m=+3287.484165040" watchObservedRunningTime="2025-09-30 20:25:58.135620405 +0000 UTC m=+3287.756553872" Sep 30 20:25:58 crc kubenswrapper[4756]: I0930 20:25:58.139431 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:25:58 crc kubenswrapper[4756]: I0930 20:25:58.139743 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j2l9t" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="registry-server" containerID="cri-o://842bb3b1227ba035b87037b776a86d87dbe53facb0b25fcd746ea9ad0b868025" gracePeriod=2 Sep 30 20:25:58 crc kubenswrapper[4756]: I0930 20:25:58.849872 4756 generic.go:334] "Generic (PLEG): container finished" podID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerID="842bb3b1227ba035b87037b776a86d87dbe53facb0b25fcd746ea9ad0b868025" exitCode=0 Sep 30 20:25:58 crc kubenswrapper[4756]: I0930 20:25:58.849957 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerDied","Data":"842bb3b1227ba035b87037b776a86d87dbe53facb0b25fcd746ea9ad0b868025"} Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.692233 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.790347 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content\") pod \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.790635 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities\") pod \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.790669 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt55j\" (UniqueName: \"kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j\") pod \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\" (UID: \"d5b4b11d-dd31-4567-8105-8f5f2d9844fe\") " Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.791360 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities" (OuterVolumeSpecName: "utilities") pod "d5b4b11d-dd31-4567-8105-8f5f2d9844fe" (UID: "d5b4b11d-dd31-4567-8105-8f5f2d9844fe"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.799743 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j" (OuterVolumeSpecName: "kube-api-access-bt55j") pod "d5b4b11d-dd31-4567-8105-8f5f2d9844fe" (UID: "d5b4b11d-dd31-4567-8105-8f5f2d9844fe"). InnerVolumeSpecName "kube-api-access-bt55j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.865204 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2l9t" event={"ID":"d5b4b11d-dd31-4567-8105-8f5f2d9844fe","Type":"ContainerDied","Data":"7d395b5259f0cd75c7af7498be871f2c18ecb8cca15a37b39387d64dec80dea9"} Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.865264 4756 scope.go:117] "RemoveContainer" containerID="842bb3b1227ba035b87037b776a86d87dbe53facb0b25fcd746ea9ad0b868025" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.865286 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2l9t" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.876211 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5b4b11d-dd31-4567-8105-8f5f2d9844fe" (UID: "d5b4b11d-dd31-4567-8105-8f5f2d9844fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.891851 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.891877 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.891887 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt55j\" (UniqueName: \"kubernetes.io/projected/d5b4b11d-dd31-4567-8105-8f5f2d9844fe-kube-api-access-bt55j\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.894563 4756 scope.go:117] "RemoveContainer" containerID="3c20bb0e90828cc675e40eede7c3a55f22ad0e4f062537dfdde109e680b105e5" Sep 30 20:25:59 crc kubenswrapper[4756]: I0930 20:25:59.913833 4756 scope.go:117] "RemoveContainer" containerID="5dc0ae852aefdca013c228b6165b5062180dc4a5dad9036b7cbb75781c3b82eb" Sep 30 20:26:00 crc kubenswrapper[4756]: I0930 20:26:00.199579 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:26:00 crc kubenswrapper[4756]: I0930 20:26:00.207816 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j2l9t"] Sep 30 20:26:01 crc kubenswrapper[4756]: I0930 20:26:01.124068 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" path="/var/lib/kubelet/pods/d5b4b11d-dd31-4567-8105-8f5f2d9844fe/volumes" Sep 30 20:26:04 crc kubenswrapper[4756]: I0930 20:26:04.069882 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:04 crc kubenswrapper[4756]: I0930 20:26:04.071136 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:04 crc kubenswrapper[4756]: I0930 20:26:04.111762 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:04 crc kubenswrapper[4756]: I0930 20:26:04.975014 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:05 crc kubenswrapper[4756]: I0930 20:26:05.147338 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:26:06 crc kubenswrapper[4756]: I0930 20:26:06.916760 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n7tmp" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="registry-server" containerID="cri-o://c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12" gracePeriod=2 Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.373449 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.456340 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2j7x\" (UniqueName: \"kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x\") pod \"674f247f-b227-49bc-95d1-df0c494472ee\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.456476 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities\") pod \"674f247f-b227-49bc-95d1-df0c494472ee\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.456631 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content\") pod \"674f247f-b227-49bc-95d1-df0c494472ee\" (UID: \"674f247f-b227-49bc-95d1-df0c494472ee\") " Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.457510 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities" (OuterVolumeSpecName: "utilities") pod "674f247f-b227-49bc-95d1-df0c494472ee" (UID: "674f247f-b227-49bc-95d1-df0c494472ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.464579 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x" (OuterVolumeSpecName: "kube-api-access-n2j7x") pod "674f247f-b227-49bc-95d1-df0c494472ee" (UID: "674f247f-b227-49bc-95d1-df0c494472ee"). InnerVolumeSpecName "kube-api-access-n2j7x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.469003 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "674f247f-b227-49bc-95d1-df0c494472ee" (UID: "674f247f-b227-49bc-95d1-df0c494472ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.558990 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.559046 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2j7x\" (UniqueName: \"kubernetes.io/projected/674f247f-b227-49bc-95d1-df0c494472ee-kube-api-access-n2j7x\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.559064 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674f247f-b227-49bc-95d1-df0c494472ee-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.925738 4756 generic.go:334] "Generic (PLEG): container finished" podID="674f247f-b227-49bc-95d1-df0c494472ee" containerID="c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12" exitCode=0 Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.925785 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerDied","Data":"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12"} Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.925835 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7tmp" event={"ID":"674f247f-b227-49bc-95d1-df0c494472ee","Type":"ContainerDied","Data":"95c7979cb30d7763f08a834b498835ee4ec37a4a8c0e52506f0ab44e5aa3712d"} Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.925834 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7tmp" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.925852 4756 scope.go:117] "RemoveContainer" containerID="c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.946053 4756 scope.go:117] "RemoveContainer" containerID="8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.981164 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.989378 4756 scope.go:117] "RemoveContainer" containerID="3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326" Sep 30 20:26:07 crc kubenswrapper[4756]: I0930 20:26:07.992621 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7tmp"] Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.011101 4756 scope.go:117] "RemoveContainer" containerID="c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12" Sep 30 20:26:08 crc kubenswrapper[4756]: E0930 20:26:08.011828 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12\": container with ID starting with c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12 not found: ID does not exist" containerID="c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12" Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.011856 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12"} err="failed to get container status \"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12\": rpc error: code = NotFound desc = could not find container \"c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12\": container with ID starting with c73fe7a96c691630f0dabb260ed91fffd0f2ae3f468dc9ba81961b65a3aa8c12 not found: ID does not exist" Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.011875 4756 scope.go:117] "RemoveContainer" containerID="8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1" Sep 30 20:26:08 crc kubenswrapper[4756]: E0930 20:26:08.012198 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1\": container with ID starting with 8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1 not found: ID does not exist" containerID="8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1" Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.012235 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1"} err="failed to get container status \"8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1\": rpc error: code = NotFound desc = could not find container \"8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1\": container with ID starting with 8e9950543714944d697990415f9f6d8eb36ba97497e1c18672d96e7c9158d5e1 not found: ID does not exist" Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.012260 4756 scope.go:117] "RemoveContainer" 
containerID="3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326" Sep 30 20:26:08 crc kubenswrapper[4756]: E0930 20:26:08.012604 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326\": container with ID starting with 3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326 not found: ID does not exist" containerID="3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326" Sep 30 20:26:08 crc kubenswrapper[4756]: I0930 20:26:08.012643 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326"} err="failed to get container status \"3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326\": rpc error: code = NotFound desc = could not find container \"3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326\": container with ID starting with 3d1323d9896782a6192ac6cdec252bfcdd2c2980eac10fdecbb8f269c9c7c326 not found: ID does not exist" Sep 30 20:26:09 crc kubenswrapper[4756]: I0930 20:26:09.124907 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="674f247f-b227-49bc-95d1-df0c494472ee" path="/var/lib/kubelet/pods/674f247f-b227-49bc-95d1-df0c494472ee/volumes" Sep 30 20:27:41 crc kubenswrapper[4756]: I0930 20:27:41.539386 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:27:41 crc kubenswrapper[4756]: I0930 20:27:41.539988 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795094 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 20:28:05.795905 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="extract-content" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795919 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="extract-content" Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 20:28:05.795932 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="extract-utilities" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795939 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="extract-utilities" Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 20:28:05.795948 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="extract-utilities" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795954 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="extract-utilities" Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 
20:28:05.795964 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="extract-content" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795971 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="extract-content" Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 20:28:05.795989 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.795996 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: E0930 20:28:05.796013 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.796018 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.796142 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5b4b11d-dd31-4567-8105-8f5f2d9844fe" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.796161 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="674f247f-b227-49bc-95d1-df0c494472ee" containerName="registry-server" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.797146 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.815786 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.899659 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm5rb\" (UniqueName: \"kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.899763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:05 crc kubenswrapper[4756]: I0930 20:28:05.899870 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.001415 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm5rb\" (UniqueName: \"kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " 
pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.001467 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.001504 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.002052 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.002079 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.019242 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm5rb\" (UniqueName: \"kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb\") pod \"certified-operators-s8t78\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.123632 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.609527 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.926437 4756 generic.go:334] "Generic (PLEG): container finished" podID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerID="07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0" exitCode=0 Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.926536 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerDied","Data":"07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0"} Sep 30 20:28:06 crc kubenswrapper[4756]: I0930 20:28:06.926794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerStarted","Data":"048511b531b5b209813e290c05a790e8713c634e752ccc8fd1d8935def409116"} Sep 30 20:28:08 crc kubenswrapper[4756]: I0930 20:28:08.946006 4756 generic.go:334] "Generic (PLEG): container finished" podID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerID="f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c" exitCode=0 Sep 30 20:28:08 crc kubenswrapper[4756]: I0930 20:28:08.946065 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerDied","Data":"f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c"} Sep 30 20:28:09 crc kubenswrapper[4756]: I0930 20:28:09.955273 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerStarted","Data":"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56"} Sep 30 20:28:09 crc kubenswrapper[4756]: I0930 20:28:09.973527 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s8t78" podStartSLOduration=2.545748771 podStartE2EDuration="4.973505988s" podCreationTimestamp="2025-09-30 20:28:05 +0000 UTC" firstStartedPulling="2025-09-30 20:28:06.928231926 +0000 UTC m=+3416.549165403" lastFinishedPulling="2025-09-30 20:28:09.355989143 +0000 UTC m=+3418.976922620" observedRunningTime="2025-09-30 20:28:09.971844795 +0000 UTC m=+3419.592778272" watchObservedRunningTime="2025-09-30 20:28:09.973505988 +0000 UTC m=+3419.594439465" Sep 30 20:28:11 crc kubenswrapper[4756]: I0930 20:28:11.538707 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:28:11 crc kubenswrapper[4756]: I0930 20:28:11.539093 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:28:16 crc kubenswrapper[4756]: I0930 20:28:16.124279 4756 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:16 crc kubenswrapper[4756]: I0930 20:28:16.125308 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:16 crc kubenswrapper[4756]: I0930 20:28:16.182672 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:17 crc kubenswrapper[4756]: I0930 20:28:17.069149 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:17 crc kubenswrapper[4756]: I0930 20:28:17.135843 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.048600 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s8t78" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="registry-server" containerID="cri-o://3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56" gracePeriod=2 Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.446804 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.586729 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content\") pod \"d8642405-81ab-4f37-aaa7-91fe21aa8482\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.586823 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities\") pod \"d8642405-81ab-4f37-aaa7-91fe21aa8482\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.586882 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm5rb\" (UniqueName: \"kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb\") pod \"d8642405-81ab-4f37-aaa7-91fe21aa8482\" (UID: \"d8642405-81ab-4f37-aaa7-91fe21aa8482\") " Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.588294 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities" (OuterVolumeSpecName: "utilities") pod "d8642405-81ab-4f37-aaa7-91fe21aa8482" (UID: "d8642405-81ab-4f37-aaa7-91fe21aa8482"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.594931 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb" (OuterVolumeSpecName: "kube-api-access-tm5rb") pod "d8642405-81ab-4f37-aaa7-91fe21aa8482" (UID: "d8642405-81ab-4f37-aaa7-91fe21aa8482"). InnerVolumeSpecName "kube-api-access-tm5rb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.688190 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.688516 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm5rb\" (UniqueName: \"kubernetes.io/projected/d8642405-81ab-4f37-aaa7-91fe21aa8482-kube-api-access-tm5rb\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.989093 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8642405-81ab-4f37-aaa7-91fe21aa8482" (UID: "d8642405-81ab-4f37-aaa7-91fe21aa8482"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:28:19 crc kubenswrapper[4756]: I0930 20:28:19.992969 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8642405-81ab-4f37-aaa7-91fe21aa8482-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.062770 4756 generic.go:334] "Generic (PLEG): container finished" podID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerID="3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56" exitCode=0 Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.062823 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerDied","Data":"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56"} Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.062852 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8t78" event={"ID":"d8642405-81ab-4f37-aaa7-91fe21aa8482","Type":"ContainerDied","Data":"048511b531b5b209813e290c05a790e8713c634e752ccc8fd1d8935def409116"} Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.062876 4756 scope.go:117] "RemoveContainer" containerID="3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.063456 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s8t78" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.087210 4756 scope.go:117] "RemoveContainer" containerID="f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.100542 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.105171 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s8t78"] Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.130455 4756 scope.go:117] "RemoveContainer" containerID="07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.146188 4756 scope.go:117] "RemoveContainer" containerID="3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56" Sep 30 20:28:20 crc kubenswrapper[4756]: E0930 20:28:20.146567 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56\": container with ID starting with 3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56 not found: ID does not exist" containerID="3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.146700 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56"} err="failed to get container status \"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56\": rpc error: code = NotFound desc = could not find container \"3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56\": container with ID starting with 3e3643c66460eb23135de29adcbfe01ffdb677f065749a99e554ec664bf9bc56 not found: ID does not exist" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.146783 4756 scope.go:117] "RemoveContainer" containerID="f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c" Sep 30 20:28:20 crc kubenswrapper[4756]: E0930 20:28:20.147090 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c\": container with ID starting with f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c not found: ID does not exist" containerID="f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.147121 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c"} err="failed to get container status \"f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c\": rpc error: code = NotFound desc = could not find container \"f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c\": container with ID starting with f0fca0f833c70d12177c93f62fe7c9b4deba30e7fa3a6cba0cc5fe78bb03ed9c not found: ID does not exist" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.147143 4756 scope.go:117] "RemoveContainer" containerID="07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0" Sep 30 20:28:20 crc kubenswrapper[4756]: E0930 20:28:20.147296 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0\": container with ID starting with 07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0 not found: ID does not exist" containerID="07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0" Sep 30 20:28:20 crc kubenswrapper[4756]: I0930 20:28:20.147318 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0"} err="failed to get container status \"07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0\": rpc error: code = NotFound desc = could not find container \"07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0\": container with ID starting with 07fbd77461e84bb3d4c83eecb5d06d17cadfaff49145c7556e91b69ea1663fa0 not found: ID does not exist" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.126141 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" path="/var/lib/kubelet/pods/d8642405-81ab-4f37-aaa7-91fe21aa8482/volumes" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.837422 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:21 crc kubenswrapper[4756]: E0930 20:28:21.837847 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="registry-server" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.837873 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="registry-server" Sep 30 20:28:21 crc kubenswrapper[4756]: E0930 20:28:21.837888 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="extract-content" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.837896 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="extract-content" Sep 30 20:28:21 crc kubenswrapper[4756]: E0930 20:28:21.837940 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="extract-utilities" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.837947 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="extract-utilities" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.838122 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8642405-81ab-4f37-aaa7-91fe21aa8482" containerName="registry-server" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.839416 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:21 crc kubenswrapper[4756]: I0930 20:28:21.852361 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.021633 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.021677 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7dq6\" (UniqueName: \"kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.022179 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.123308 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.123438 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.123460 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7dq6\" (UniqueName: \"kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.123855 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.124164 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.149368 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-q7dq6\" (UniqueName: \"kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6\") pod \"community-operators-fr57m\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.162057 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:22 crc kubenswrapper[4756]: I0930 20:28:22.593657 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:23 crc kubenswrapper[4756]: I0930 20:28:23.088213 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerID="58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4" exitCode=0 Sep 30 20:28:23 crc kubenswrapper[4756]: I0930 20:28:23.088251 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerDied","Data":"58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4"} Sep 30 20:28:23 crc kubenswrapper[4756]: I0930 20:28:23.088274 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerStarted","Data":"8ab18dca614c931ce4124eeaccabc3d2efc46f3e92ec43c7f99e9796d6e13bb0"} Sep 30 20:28:25 crc kubenswrapper[4756]: I0930 20:28:25.112369 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerID="6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25" exitCode=0 Sep 30 20:28:25 crc kubenswrapper[4756]: I0930 20:28:25.112436 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerDied","Data":"6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25"} Sep 30 20:28:26 crc kubenswrapper[4756]: I0930 20:28:26.125614 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerStarted","Data":"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16"} Sep 30 20:28:26 crc kubenswrapper[4756]: I0930 20:28:26.147638 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fr57m" podStartSLOduration=2.727637167 podStartE2EDuration="5.147617871s" podCreationTimestamp="2025-09-30 20:28:21 +0000 UTC" firstStartedPulling="2025-09-30 20:28:23.090890759 +0000 UTC m=+3432.711824276" lastFinishedPulling="2025-09-30 20:28:25.510871493 +0000 UTC m=+3435.131804980" observedRunningTime="2025-09-30 20:28:26.140173666 +0000 UTC m=+3435.761107133" watchObservedRunningTime="2025-09-30 20:28:26.147617871 +0000 UTC m=+3435.768551358" Sep 30 20:28:32 crc kubenswrapper[4756]: I0930 20:28:32.162878 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:32 crc kubenswrapper[4756]: I0930 20:28:32.163387 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:32 crc kubenswrapper[4756]: I0930 20:28:32.203773 4756 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:32 crc kubenswrapper[4756]: I0930 20:28:32.244757 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:32 crc kubenswrapper[4756]: I0930 20:28:32.440416 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.192925 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fr57m" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="registry-server" containerID="cri-o://ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16" gracePeriod=2 Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.562125 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.695614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities\") pod \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.695690 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content\") pod \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.695738 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7dq6\" (UniqueName: \"kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6\") pod \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\" (UID: \"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f\") " Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.696703 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities" (OuterVolumeSpecName: "utilities") pod "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" (UID: "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.702252 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6" (OuterVolumeSpecName: "kube-api-access-q7dq6") pod "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" (UID: "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f"). InnerVolumeSpecName "kube-api-access-q7dq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.746743 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" (UID: "a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.797262 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.797299 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:34 crc kubenswrapper[4756]: I0930 20:28:34.797311 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7dq6\" (UniqueName: \"kubernetes.io/projected/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f-kube-api-access-q7dq6\") on node \"crc\" DevicePath \"\"" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.204310 4756 generic.go:334] "Generic (PLEG): container finished" podID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerID="ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16" exitCode=0 Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.204369 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fr57m" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.204374 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerDied","Data":"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16"} Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.204822 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr57m" event={"ID":"a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f","Type":"ContainerDied","Data":"8ab18dca614c931ce4124eeaccabc3d2efc46f3e92ec43c7f99e9796d6e13bb0"} Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.204857 4756 scope.go:117] "RemoveContainer" containerID="ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.228528 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.233595 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fr57m"] Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.238596 4756 scope.go:117] "RemoveContainer" containerID="6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.259806 4756 scope.go:117] "RemoveContainer" containerID="58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.282159 4756 scope.go:117] "RemoveContainer" containerID="ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16" Sep 30 20:28:35 crc kubenswrapper[4756]: E0930 20:28:35.282813 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16\": container with ID starting with ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16 not found: ID does not exist" containerID="ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.282930 
4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16"} err="failed to get container status \"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16\": rpc error: code = NotFound desc = could not find container \"ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16\": container with ID starting with ed2b91653dc0ad38e11cb67aed4b05568b03007bb7918f4ec04f0bc66cb44c16 not found: ID does not exist" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.282992 4756 scope.go:117] "RemoveContainer" containerID="6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25" Sep 30 20:28:35 crc kubenswrapper[4756]: E0930 20:28:35.283717 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25\": container with ID starting with 6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25 not found: ID does not exist" containerID="6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.283769 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25"} err="failed to get container status \"6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25\": rpc error: code = NotFound desc = could not find container \"6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25\": container with ID starting with 6dc80d534dcfdc15aab18cb26d89dea86305bd4ab3743183df5a1dcbbd296e25 not found: ID does not exist" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.283802 4756 scope.go:117] "RemoveContainer" containerID="58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4" Sep 30 20:28:35 crc kubenswrapper[4756]: E0930 20:28:35.284129 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4\": container with ID starting with 58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4 not found: ID does not exist" containerID="58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4" Sep 30 20:28:35 crc kubenswrapper[4756]: I0930 20:28:35.284299 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4"} err="failed to get container status \"58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4\": rpc error: code = NotFound desc = could not find container \"58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4\": container with ID starting with 58c2b4e4f9edf19bf5791bcacca2f3ae8cf050601c6ffe5f92586f357c0fbfd4 not found: ID does not exist" Sep 30 20:28:37 crc kubenswrapper[4756]: I0930 20:28:37.128321 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" path="/var/lib/kubelet/pods/a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f/volumes" Sep 30 20:28:41 crc kubenswrapper[4756]: I0930 20:28:41.539244 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:28:41 crc kubenswrapper[4756]: I0930 20:28:41.539781 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:28:41 crc kubenswrapper[4756]: I0930 20:28:41.539860 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:28:41 crc kubenswrapper[4756]: I0930 20:28:41.540923 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:28:41 crc kubenswrapper[4756]: I0930 20:28:41.541037 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3" gracePeriod=600 Sep 30 20:28:42 crc kubenswrapper[4756]: I0930 20:28:42.279693 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3" exitCode=0 Sep 30 20:28:42 crc kubenswrapper[4756]: I0930 20:28:42.279752 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3"} Sep 30 20:28:42 crc kubenswrapper[4756]: I0930 20:28:42.280209 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91"} Sep 30 20:28:42 crc kubenswrapper[4756]: I0930 20:28:42.280240 4756 scope.go:117] "RemoveContainer" containerID="7140069c5e0a2a2d080cb59aaea6ca4896a02d0db8684c84fd8d28280acfe154" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.168808 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl"] Sep 30 20:30:00 crc kubenswrapper[4756]: E0930 20:30:00.169709 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="extract-utilities" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.169723 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="extract-utilities" Sep 30 20:30:00 crc kubenswrapper[4756]: E0930 20:30:00.169737 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="extract-content" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.169743 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" 
containerName="extract-content" Sep 30 20:30:00 crc kubenswrapper[4756]: E0930 20:30:00.169756 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="registry-server" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.169763 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="registry-server" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.169910 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e8e1bc-bbad-4901-8daa-9ddf4a74c31f" containerName="registry-server" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.170413 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.172613 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.175425 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.179416 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl"] Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.308454 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbrqp\" (UniqueName: \"kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.308520 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.309085 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.411497 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbrqp\" (UniqueName: \"kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.411650 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.411894 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.412926 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.418046 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.443210 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbrqp\" (UniqueName: \"kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp\") pod \"collect-profiles-29321070-88ddl\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.492271 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.715602 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl"] Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.952081 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" event={"ID":"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6","Type":"ContainerStarted","Data":"73f7e5b3073364c3579afe79763dde44a71d4e0a7e699ad746bafeede2a463c7"} Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.952464 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" event={"ID":"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6","Type":"ContainerStarted","Data":"1f1ec80bedde06ceb32846f91f70c65366cd8f1a5a994b7cc850f8266be81ae8"} Sep 30 20:30:00 crc kubenswrapper[4756]: I0930 20:30:00.979993 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" podStartSLOduration=0.979969593 podStartE2EDuration="979.969593ms" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:00.972639871 +0000 UTC m=+3530.593573368" watchObservedRunningTime="2025-09-30 20:30:00.979969593 +0000 UTC m=+3530.600903070" Sep 30 20:30:01 crc kubenswrapper[4756]: I0930 20:30:01.959643 4756 generic.go:334] "Generic (PLEG): container finished" podID="45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" containerID="73f7e5b3073364c3579afe79763dde44a71d4e0a7e699ad746bafeede2a463c7" exitCode=0 Sep 30 20:30:01 crc kubenswrapper[4756]: I0930 20:30:01.959703 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" event={"ID":"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6","Type":"ContainerDied","Data":"73f7e5b3073364c3579afe79763dde44a71d4e0a7e699ad746bafeede2a463c7"} Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.245259 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.252456 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume\") pod \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.252514 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume\") pod \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.252642 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbrqp\" (UniqueName: \"kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp\") pod \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\" (UID: \"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6\") " Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.253147 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume" (OuterVolumeSpecName: "config-volume") pod "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" (UID: "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.257960 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" (UID: "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.259011 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp" (OuterVolumeSpecName: "kube-api-access-bbrqp") pod "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" (UID: "45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6"). InnerVolumeSpecName "kube-api-access-bbrqp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.353964 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.354059 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.354070 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbrqp\" (UniqueName: \"kubernetes.io/projected/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6-kube-api-access-bbrqp\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.979724 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" event={"ID":"45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6","Type":"ContainerDied","Data":"1f1ec80bedde06ceb32846f91f70c65366cd8f1a5a994b7cc850f8266be81ae8"} Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.979766 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl" Sep 30 20:30:03 crc kubenswrapper[4756]: I0930 20:30:03.979772 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f1ec80bedde06ceb32846f91f70c65366cd8f1a5a994b7cc850f8266be81ae8" Sep 30 20:30:04 crc kubenswrapper[4756]: I0930 20:30:04.308963 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh"] Sep 30 20:30:04 crc kubenswrapper[4756]: I0930 20:30:04.313707 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-twlmh"] Sep 30 20:30:05 crc kubenswrapper[4756]: I0930 20:30:05.123853 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ad9e216-4aec-4e6c-84f9-8b714c6e57f1" path="/var/lib/kubelet/pods/9ad9e216-4aec-4e6c-84f9-8b714c6e57f1/volumes" Sep 30 20:30:19 crc kubenswrapper[4756]: I0930 20:30:19.466779 4756 scope.go:117] "RemoveContainer" containerID="b6b7bf2fe7d30cfb6e4ee8e95898b7b4ea8a2edeeed7df5ed94bc3a661470790" Sep 30 20:30:41 crc kubenswrapper[4756]: I0930 20:30:41.539151 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:30:41 crc kubenswrapper[4756]: I0930 20:30:41.539714 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:11 crc kubenswrapper[4756]: I0930 20:31:11.538932 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Sep 30 20:31:11 crc kubenswrapper[4756]: I0930 20:31:11.539593 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.539346 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.539976 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.540038 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.540837 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.540922 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" gracePeriod=600 Sep 30 20:31:41 crc kubenswrapper[4756]: E0930 20:31:41.670452 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.721219 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" exitCode=0 Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.721253 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91"} Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.721555 4756 scope.go:117] "RemoveContainer" containerID="ae8690c9a70a632b97b3f050b9c93ed7793158b7537fa3bb06bd34f71872afd3" Sep 30 20:31:41 crc kubenswrapper[4756]: I0930 20:31:41.722385 4756 scope.go:117] "RemoveContainer" 
containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:31:41 crc kubenswrapper[4756]: E0930 20:31:41.723080 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:31:54 crc kubenswrapper[4756]: I0930 20:31:54.117319 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:31:54 crc kubenswrapper[4756]: E0930 20:31:54.118254 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:32:07 crc kubenswrapper[4756]: I0930 20:32:07.117771 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:32:07 crc kubenswrapper[4756]: E0930 20:32:07.120419 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:32:22 crc kubenswrapper[4756]: I0930 20:32:22.116620 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:32:22 crc kubenswrapper[4756]: E0930 20:32:22.117630 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:32:33 crc kubenswrapper[4756]: I0930 20:32:33.117739 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:32:33 crc kubenswrapper[4756]: E0930 20:32:33.118737 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:32:44 crc kubenswrapper[4756]: I0930 20:32:44.115841 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:32:44 crc kubenswrapper[4756]: E0930 20:32:44.116696 4756 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:32:58 crc kubenswrapper[4756]: I0930 20:32:58.117312 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:32:58 crc kubenswrapper[4756]: E0930 20:32:58.118864 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:33:10 crc kubenswrapper[4756]: I0930 20:33:10.119509 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:33:10 crc kubenswrapper[4756]: E0930 20:33:10.120230 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:33:22 crc kubenswrapper[4756]: I0930 20:33:22.116740 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:33:22 crc kubenswrapper[4756]: E0930 20:33:22.117543 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:33:34 crc kubenswrapper[4756]: I0930 20:33:34.116112 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:33:34 crc kubenswrapper[4756]: E0930 20:33:34.120502 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:33:49 crc kubenswrapper[4756]: I0930 20:33:49.115898 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:33:49 crc kubenswrapper[4756]: E0930 20:33:49.116669 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:34:04 crc kubenswrapper[4756]: I0930 20:34:04.115702 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:34:04 crc kubenswrapper[4756]: E0930 20:34:04.116428 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:34:19 crc kubenswrapper[4756]: I0930 20:34:19.115916 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:34:19 crc kubenswrapper[4756]: E0930 20:34:19.116576 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:34:31 crc kubenswrapper[4756]: I0930 20:34:31.119540 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:34:31 crc kubenswrapper[4756]: E0930 20:34:31.120235 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:34:44 crc kubenswrapper[4756]: I0930 20:34:44.115744 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:34:44 crc kubenswrapper[4756]: E0930 20:34:44.116594 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:34:58 crc kubenswrapper[4756]: I0930 20:34:58.116245 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:34:58 crc kubenswrapper[4756]: E0930 20:34:58.117139 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" 
podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:35:12 crc kubenswrapper[4756]: I0930 20:35:12.116252 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:35:12 crc kubenswrapper[4756]: E0930 20:35:12.117137 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:35:26 crc kubenswrapper[4756]: I0930 20:35:26.115651 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:35:26 crc kubenswrapper[4756]: E0930 20:35:26.117413 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:35:37 crc kubenswrapper[4756]: I0930 20:35:37.116063 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:35:37 crc kubenswrapper[4756]: E0930 20:35:37.117108 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:35:49 crc kubenswrapper[4756]: I0930 20:35:49.116463 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:35:49 crc kubenswrapper[4756]: E0930 20:35:49.117196 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:36:00 crc kubenswrapper[4756]: I0930 20:36:00.116624 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:36:00 crc kubenswrapper[4756]: E0930 20:36:00.118264 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:36:11 crc kubenswrapper[4756]: I0930 20:36:11.129569 4756 scope.go:117] "RemoveContainer" 
containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:36:11 crc kubenswrapper[4756]: E0930 20:36:11.130268 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:36:26 crc kubenswrapper[4756]: I0930 20:36:26.115777 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:36:26 crc kubenswrapper[4756]: E0930 20:36:26.116498 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.311809 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"] Sep 30 20:36:29 crc kubenswrapper[4756]: E0930 20:36:29.312471 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" containerName="collect-profiles" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.312489 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" containerName="collect-profiles" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.312713 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" containerName="collect-profiles" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.313807 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.324426 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"] Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.488912 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvf8q\" (UniqueName: \"kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.488974 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.489009 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.590039 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvf8q\" (UniqueName: \"kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.590101 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.590125 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.590606 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.590686 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.617462 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xvf8q\" (UniqueName: \"kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q\") pod \"redhat-operators-fn2rx\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") " pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:29 crc kubenswrapper[4756]: I0930 20:36:29.634216 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fn2rx" Sep 30 20:36:30 crc kubenswrapper[4756]: I0930 20:36:30.084514 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"] Sep 30 20:36:30 crc kubenswrapper[4756]: I0930 20:36:30.848163 4756 generic.go:334] "Generic (PLEG): container finished" podID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerID="a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38" exitCode=0 Sep 30 20:36:30 crc kubenswrapper[4756]: I0930 20:36:30.848210 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerDied","Data":"a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38"} Sep 30 20:36:30 crc kubenswrapper[4756]: I0930 20:36:30.848621 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerStarted","Data":"1b4e2ecd09f28b16a431845fdf4fddff1ffb0e543816a5076a14ae1c0d91657a"} Sep 30 20:36:30 crc kubenswrapper[4756]: I0930 20:36:30.850839 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:36:32 crc kubenswrapper[4756]: I0930 20:36:32.863934 4756 generic.go:334] "Generic (PLEG): container finished" podID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerID="abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6" exitCode=0 Sep 30 20:36:32 crc kubenswrapper[4756]: I0930 20:36:32.863975 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerDied","Data":"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6"} Sep 30 20:36:33 crc kubenswrapper[4756]: I0930 20:36:33.874230 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerStarted","Data":"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"} Sep 30 20:36:33 crc kubenswrapper[4756]: I0930 20:36:33.896467 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fn2rx" podStartSLOduration=2.348962452 podStartE2EDuration="4.896447579s" podCreationTimestamp="2025-09-30 20:36:29 +0000 UTC" firstStartedPulling="2025-09-30 20:36:30.850492501 +0000 UTC m=+3920.471425998" lastFinishedPulling="2025-09-30 20:36:33.397977648 +0000 UTC m=+3923.018911125" observedRunningTime="2025-09-30 20:36:33.890179916 +0000 UTC m=+3923.511113393" watchObservedRunningTime="2025-09-30 20:36:33.896447579 +0000 UTC m=+3923.517381046" Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.284272 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"] Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.285790 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.294138 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"]
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.404580 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mlkv\" (UniqueName: \"kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.404640 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.404672 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.505451 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.505816 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mlkv\" (UniqueName: \"kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.505906 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.505921 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.506374 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.528933 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mlkv\" (UniqueName: \"kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv\") pod \"redhat-marketplace-f2rlz\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:34 crc kubenswrapper[4756]: I0930 20:36:34.603945 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2rlz"
Sep 30 20:36:35 crc kubenswrapper[4756]: I0930 20:36:35.052468 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"]
Sep 30 20:36:35 crc kubenswrapper[4756]: W0930 20:36:35.059834 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddddad69_f55c_4cdc_9ec8_a5a289dfaa5c.slice/crio-7b9c3dcffcd7f7f5f5b20f6e9f3591a48f9a60130c2468d9d90cdbab9037cbd9 WatchSource:0}: Error finding container 7b9c3dcffcd7f7f5f5b20f6e9f3591a48f9a60130c2468d9d90cdbab9037cbd9: Status 404 returned error can't find the container with id 7b9c3dcffcd7f7f5f5b20f6e9f3591a48f9a60130c2468d9d90cdbab9037cbd9
Sep 30 20:36:35 crc kubenswrapper[4756]: I0930 20:36:35.887362 4756 generic.go:334] "Generic (PLEG): container finished" podID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerID="2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f" exitCode=0
Sep 30 20:36:35 crc kubenswrapper[4756]: I0930 20:36:35.887419 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerDied","Data":"2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f"}
Sep 30 20:36:35 crc kubenswrapper[4756]: I0930 20:36:35.887624 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerStarted","Data":"7b9c3dcffcd7f7f5f5b20f6e9f3591a48f9a60130c2468d9d90cdbab9037cbd9"}
Sep 30 20:36:39 crc kubenswrapper[4756]: I0930 20:36:39.634933 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fn2rx"
Sep 30 20:36:39 crc kubenswrapper[4756]: I0930 20:36:39.635343 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fn2rx"
Sep 30 20:36:39 crc kubenswrapper[4756]: I0930 20:36:39.677014 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fn2rx"
Sep 30 20:36:39 crc kubenswrapper[4756]: I0930 20:36:39.949114 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fn2rx"
Sep 30 20:36:39 crc kubenswrapper[4756]: I0930 20:36:39.988190 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"]
Sep 30 20:36:41 crc kubenswrapper[4756]: I0930 20:36:41.121036 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91"
Sep 30 20:36:41 crc kubenswrapper[4756]: E0930 20:36:41.121647 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:36:41 crc kubenswrapper[4756]: I0930 20:36:41.928807 4756 generic.go:334] "Generic (PLEG): container finished" podID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerID="6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2" exitCode=0
Sep 30 20:36:41 crc kubenswrapper[4756]: I0930 20:36:41.928864 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerDied","Data":"6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2"}
Sep 30 20:36:41 crc kubenswrapper[4756]: I0930 20:36:41.929058 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fn2rx" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="registry-server" containerID="cri-o://ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7" gracePeriod=2
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.292254 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fn2rx"
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.406693 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities\") pod \"ea886481-3dcf-47cd-ad04-f5c88a34a952\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") "
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.406793 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content\") pod \"ea886481-3dcf-47cd-ad04-f5c88a34a952\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") "
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.406925 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvf8q\" (UniqueName: \"kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q\") pod \"ea886481-3dcf-47cd-ad04-f5c88a34a952\" (UID: \"ea886481-3dcf-47cd-ad04-f5c88a34a952\") "
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.407966 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities" (OuterVolumeSpecName: "utilities") pod "ea886481-3dcf-47cd-ad04-f5c88a34a952" (UID: "ea886481-3dcf-47cd-ad04-f5c88a34a952"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.411859 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q" (OuterVolumeSpecName: "kube-api-access-xvf8q") pod "ea886481-3dcf-47cd-ad04-f5c88a34a952" (UID: "ea886481-3dcf-47cd-ad04-f5c88a34a952"). InnerVolumeSpecName "kube-api-access-xvf8q". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.502618 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea886481-3dcf-47cd-ad04-f5c88a34a952" (UID: "ea886481-3dcf-47cd-ad04-f5c88a34a952"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.508701 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvf8q\" (UniqueName: \"kubernetes.io/projected/ea886481-3dcf-47cd-ad04-f5c88a34a952-kube-api-access-xvf8q\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.508739 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.508750 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea886481-3dcf-47cd-ad04-f5c88a34a952-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.940603 4756 generic.go:334] "Generic (PLEG): container finished" podID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerID="ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7" exitCode=0 Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.940667 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerDied","Data":"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"} Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.940707 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.940742 4756 scope.go:117] "RemoveContainer" containerID="ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.940728 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fn2rx" event={"ID":"ea886481-3dcf-47cd-ad04-f5c88a34a952","Type":"ContainerDied","Data":"1b4e2ecd09f28b16a431845fdf4fddff1ffb0e543816a5076a14ae1c0d91657a"}
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.969947 4756 scope.go:117] "RemoveContainer" containerID="abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6"
Sep 30 20:36:42 crc kubenswrapper[4756]: I0930 20:36:42.998338 4756 scope.go:117] "RemoveContainer" containerID="a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38"
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.006834 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"]
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.012681 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fn2rx"]
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.026473 4756 scope.go:117] "RemoveContainer" containerID="ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"
Sep 30 20:36:43 crc kubenswrapper[4756]: E0930 20:36:43.026971 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7\": container with ID starting with ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7 not found: ID does not exist" containerID="ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.027014 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7"} err="failed to get container status \"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7\": rpc error: code = NotFound desc = could not find container \"ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7\": container with ID starting with ea86abf710b5ad77d9d91493195e637b24e0d4f2cba93b50507af4cc7e7d80e7 not found: ID does not exist"
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.027039 4756 scope.go:117] "RemoveContainer" containerID="abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6"
Sep 30 20:36:43 crc kubenswrapper[4756]: E0930 20:36:43.027316 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6\": container with ID starting with abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6 not found: ID does not exist" containerID="abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6"
Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.027344 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6"} err="failed to get container status \"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6\": rpc error: code = NotFound desc = could not find container \"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6\": container with ID starting with abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6 not found: ID does not exist"
\"abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6\": container with ID starting with abb46785b0a22c163e54542b54629676df251eb92d9665c64635e7060b435bf6 not found: ID does not exist" Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.027376 4756 scope.go:117] "RemoveContainer" containerID="a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38" Sep 30 20:36:43 crc kubenswrapper[4756]: E0930 20:36:43.027770 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38\": container with ID starting with a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38 not found: ID does not exist" containerID="a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38" Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.027808 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38"} err="failed to get container status \"a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38\": rpc error: code = NotFound desc = could not find container \"a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38\": container with ID starting with a9bb0eb015a40bc222b1c3a070aba57baca839e1136d88f13b0b035e9075fd38 not found: ID does not exist" Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.138729 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" path="/var/lib/kubelet/pods/ea886481-3dcf-47cd-ad04-f5c88a34a952/volumes" Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.949342 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerStarted","Data":"c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655"} Sep 30 20:36:43 crc kubenswrapper[4756]: I0930 20:36:43.970088 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f2rlz" podStartSLOduration=2.952983914 podStartE2EDuration="9.970072674s" podCreationTimestamp="2025-09-30 20:36:34 +0000 UTC" firstStartedPulling="2025-09-30 20:36:35.889124944 +0000 UTC m=+3925.510058421" lastFinishedPulling="2025-09-30 20:36:42.906213704 +0000 UTC m=+3932.527147181" observedRunningTime="2025-09-30 20:36:43.967381433 +0000 UTC m=+3933.588314910" watchObservedRunningTime="2025-09-30 20:36:43.970072674 +0000 UTC m=+3933.591006151" Sep 30 20:36:44 crc kubenswrapper[4756]: I0930 20:36:44.604595 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:44 crc kubenswrapper[4756]: I0930 20:36:44.604674 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:44 crc kubenswrapper[4756]: I0930 20:36:44.656885 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:52 crc kubenswrapper[4756]: I0930 20:36:52.116371 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:36:53 crc kubenswrapper[4756]: I0930 20:36:53.022608 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3"} Sep 30 20:36:54 crc kubenswrapper[4756]: I0930 20:36:54.644462 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:54 crc kubenswrapper[4756]: I0930 20:36:54.694213 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"] Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.038464 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f2rlz" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="registry-server" containerID="cri-o://c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655" gracePeriod=2 Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.385886 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.484078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities\") pod \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.484175 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mlkv\" (UniqueName: \"kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv\") pod \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.484250 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content\") pod \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\" (UID: \"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c\") " Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.486352 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities" (OuterVolumeSpecName: "utilities") pod "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" (UID: "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.490638 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv" (OuterVolumeSpecName: "kube-api-access-9mlkv") pod "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" (UID: "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c"). InnerVolumeSpecName "kube-api-access-9mlkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.498990 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" (UID: "ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.586045 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.586088 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:55 crc kubenswrapper[4756]: I0930 20:36:55.586100 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mlkv\" (UniqueName: \"kubernetes.io/projected/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c-kube-api-access-9mlkv\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.050303 4756 generic.go:334] "Generic (PLEG): container finished" podID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerID="c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655" exitCode=0 Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.050403 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2rlz" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.050385 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerDied","Data":"c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655"} Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.050576 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2rlz" event={"ID":"ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c","Type":"ContainerDied","Data":"7b9c3dcffcd7f7f5f5b20f6e9f3591a48f9a60130c2468d9d90cdbab9037cbd9"} Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.050612 4756 scope.go:117] "RemoveContainer" containerID="c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.072087 4756 scope.go:117] "RemoveContainer" containerID="6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.095875 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"] Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.101326 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2rlz"] Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.110067 4756 scope.go:117] "RemoveContainer" containerID="2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.131729 4756 scope.go:117] "RemoveContainer" containerID="c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655" Sep 30 20:36:56 crc kubenswrapper[4756]: E0930 20:36:56.132202 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655\": container with ID starting with c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655 not found: ID does not exist" containerID="c2389799d15e39b8542ba345eacd4874124d6e56914e0cd4538e1d4283400655" Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.132235 4756 
Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.132284 4756 scope.go:117] "RemoveContainer" containerID="6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2"
Sep 30 20:36:56 crc kubenswrapper[4756]: E0930 20:36:56.132831 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2\": container with ID starting with 6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2 not found: ID does not exist" containerID="6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2"
Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.132943 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2"} err="failed to get container status \"6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2\": rpc error: code = NotFound desc = could not find container \"6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2\": container with ID starting with 6266e56ab937898c2305772ab0c87debb94c3644cb0a08a16b21ab0af1b89ce2 not found: ID does not exist"
Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.133051 4756 scope.go:117] "RemoveContainer" containerID="2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f"
Sep 30 20:36:56 crc kubenswrapper[4756]: E0930 20:36:56.133730 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f\": container with ID starting with 2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f not found: ID does not exist" containerID="2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f"
Sep 30 20:36:56 crc kubenswrapper[4756]: I0930 20:36:56.133840 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f"} err="failed to get container status \"2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f\": rpc error: code = NotFound desc = could not find container \"2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f\": container with ID starting with 2f30f7f59fca0de59ec64b26bcf93da99869b41c9e3cfdde6fdd06f42be2493f not found: ID does not exist"
Sep 30 20:36:57 crc kubenswrapper[4756]: I0930 20:36:57.125059 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" path="/var/lib/kubelet/pods/ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c/volumes"
Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.984658 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hs656"]
Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.985512 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="extract-content"
podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="extract-content" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.985531 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="extract-content" Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.985566 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="extract-utilities" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.985577 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="extract-utilities" Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.986295 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="extract-content" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986314 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="extract-content" Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.986331 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986377 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.986443 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="extract-utilities" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986456 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="extract-utilities" Sep 30 20:38:38 crc kubenswrapper[4756]: E0930 20:38:38.986519 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986534 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986951 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddddad69-f55c-4cdc-9ec8-a5a289dfaa5c" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.986978 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea886481-3dcf-47cd-ad04-f5c88a34a952" containerName="registry-server" Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.991016 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 20:38:38 crc kubenswrapper[4756]: I0930 20:38:38.998787 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hs656"]
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.061249 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.061576 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.061719 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.162616 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.163002 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.163064 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.163598 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.163709 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.183400 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656"
"MountVolume.SetUp succeeded for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") pod \"certified-operators-hs656\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") " pod="openshift-marketplace/certified-operators-hs656" Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.322957 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hs656" Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.645356 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hs656"] Sep 30 20:38:39 crc kubenswrapper[4756]: I0930 20:38:39.927423 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerStarted","Data":"d7e7be39195cf85f19fcf97f4d741416742d7f875a4d34606edc6f953f85f71f"} Sep 30 20:38:40 crc kubenswrapper[4756]: I0930 20:38:40.937886 4756 generic.go:334] "Generic (PLEG): container finished" podID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerID="163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81" exitCode=0 Sep 30 20:38:40 crc kubenswrapper[4756]: I0930 20:38:40.937930 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerDied","Data":"163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81"} Sep 30 20:38:42 crc kubenswrapper[4756]: I0930 20:38:42.954000 4756 generic.go:334] "Generic (PLEG): container finished" podID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerID="f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb" exitCode=0 Sep 30 20:38:42 crc kubenswrapper[4756]: I0930 20:38:42.954083 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerDied","Data":"f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb"} Sep 30 20:38:43 crc kubenswrapper[4756]: I0930 20:38:43.962721 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerStarted","Data":"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a"} Sep 30 20:38:43 crc kubenswrapper[4756]: I0930 20:38:43.983725 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hs656" podStartSLOduration=3.541128787 podStartE2EDuration="5.983708037s" podCreationTimestamp="2025-09-30 20:38:38 +0000 UTC" firstStartedPulling="2025-09-30 20:38:40.940469177 +0000 UTC m=+4050.561402654" lastFinishedPulling="2025-09-30 20:38:43.383048397 +0000 UTC m=+4053.003981904" observedRunningTime="2025-09-30 20:38:43.978390638 +0000 UTC m=+4053.599324115" watchObservedRunningTime="2025-09-30 20:38:43.983708037 +0000 UTC m=+4053.604641514" Sep 30 20:38:49 crc kubenswrapper[4756]: I0930 20:38:49.323861 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hs656" Sep 30 20:38:49 crc kubenswrapper[4756]: I0930 20:38:49.324468 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hs656" Sep 30 20:38:49 crc kubenswrapper[4756]: I0930 20:38:49.367769 4756 
Sep 30 20:38:50 crc kubenswrapper[4756]: I0930 20:38:50.058861 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:50 crc kubenswrapper[4756]: I0930 20:38:50.122228 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hs656"]
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.019369 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hs656" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="registry-server" containerID="cri-o://cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a" gracePeriod=2
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.444224 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hs656"
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.467021 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content\") pod \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") "
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.467085 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") pod \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") "
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.467148 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities\") pod \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\" (UID: \"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0\") "
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.468262 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities" (OuterVolumeSpecName: "utilities") pod "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" (UID: "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.476996 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn" (OuterVolumeSpecName: "kube-api-access-gl2sn") pod "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" (UID: "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0"). InnerVolumeSpecName "kube-api-access-gl2sn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.525840 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" (UID: "b745e9b8-9d77-40f1-ba5b-a347a86d8ce0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.568725 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.568764 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:52 crc kubenswrapper[4756]: I0930 20:38:52.568775 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl2sn\" (UniqueName: \"kubernetes.io/projected/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0-kube-api-access-gl2sn\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.042948 4756 generic.go:334] "Generic (PLEG): container finished" podID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerID="cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a" exitCode=0 Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.043021 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerDied","Data":"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a"} Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.043659 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hs656" event={"ID":"b745e9b8-9d77-40f1-ba5b-a347a86d8ce0","Type":"ContainerDied","Data":"d7e7be39195cf85f19fcf97f4d741416742d7f875a4d34606edc6f953f85f71f"} Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.043697 4756 scope.go:117] "RemoveContainer" containerID="cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a" Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.043201 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.077808 4756 scope.go:117] "RemoveContainer" containerID="f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.105642 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hs656"]
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.110235 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hs656"]
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.116305 4756 scope.go:117] "RemoveContainer" containerID="163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.133811 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" path="/var/lib/kubelet/pods/b745e9b8-9d77-40f1-ba5b-a347a86d8ce0/volumes"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.141327 4756 scope.go:117] "RemoveContainer" containerID="cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a"
Sep 30 20:38:53 crc kubenswrapper[4756]: E0930 20:38:53.143092 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a\": container with ID starting with cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a not found: ID does not exist" containerID="cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.143125 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a"} err="failed to get container status \"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a\": rpc error: code = NotFound desc = could not find container \"cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a\": container with ID starting with cb7f6694a2a9a49428588764283ffe465fd44a921505ec5b9206260a0e169e8a not found: ID does not exist"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.143145 4756 scope.go:117] "RemoveContainer" containerID="f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb"
Sep 30 20:38:53 crc kubenswrapper[4756]: E0930 20:38:53.144617 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb\": container with ID starting with f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb not found: ID does not exist" containerID="f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.144650 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb"} err="failed to get container status \"f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb\": rpc error: code = NotFound desc = could not find container \"f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb\": container with ID starting with f4c9c97dd3947f41209ccc6310d53faa7bc4ec4b64d35604fb98c2a213680fcb not found: ID does not exist"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.144668 4756 scope.go:117] "RemoveContainer" containerID="163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81"
Sep 30 20:38:53 crc kubenswrapper[4756]: E0930 20:38:53.145448 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81\": container with ID starting with 163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81 not found: ID does not exist" containerID="163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81"
Sep 30 20:38:53 crc kubenswrapper[4756]: I0930 20:38:53.145481 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81"} err="failed to get container status \"163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81\": rpc error: code = NotFound desc = could not find container \"163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81\": container with ID starting with 163e9d3153db669976b65ea15e1f7a2d0dfa7da82b2416e2f26f03ca45b1de81 not found: ID does not exist"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.092014 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xvm8w"]
Sep 30 20:39:03 crc kubenswrapper[4756]: E0930 20:39:03.093754 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="registry-server"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.093785 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="registry-server"
Sep 30 20:39:03 crc kubenswrapper[4756]: E0930 20:39:03.093817 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="extract-utilities"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.093830 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="extract-utilities"
Sep 30 20:39:03 crc kubenswrapper[4756]: E0930 20:39:03.093874 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="extract-content"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.093887 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="extract-content"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.094213 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b745e9b8-9d77-40f1-ba5b-a347a86d8ce0" containerName="registry-server"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.096551 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.105037 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xvm8w"]
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.110195 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.110337 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.110507 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.225274 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.225500 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.225587 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.228160 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.229264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.254137 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w"
"MountVolume.SetUp succeeded for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") pod \"community-operators-xvm8w\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.426989 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:03 crc kubenswrapper[4756]: I0930 20:39:03.916984 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xvm8w"] Sep 30 20:39:04 crc kubenswrapper[4756]: I0930 20:39:04.129583 4756 generic.go:334] "Generic (PLEG): container finished" podID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerID="d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a" exitCode=0 Sep 30 20:39:04 crc kubenswrapper[4756]: I0930 20:39:04.129645 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerDied","Data":"d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a"} Sep 30 20:39:04 crc kubenswrapper[4756]: I0930 20:39:04.129687 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerStarted","Data":"530d6c17e84dcd0d48caf809ef587acae5d431ea3da4889a2ba1c772872a0d38"} Sep 30 20:39:05 crc kubenswrapper[4756]: I0930 20:39:05.139061 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerStarted","Data":"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e"} Sep 30 20:39:06 crc kubenswrapper[4756]: I0930 20:39:06.147852 4756 generic.go:334] "Generic (PLEG): container finished" podID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerID="31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e" exitCode=0 Sep 30 20:39:06 crc kubenswrapper[4756]: I0930 20:39:06.147903 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerDied","Data":"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e"} Sep 30 20:39:07 crc kubenswrapper[4756]: I0930 20:39:07.159074 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerStarted","Data":"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168"} Sep 30 20:39:11 crc kubenswrapper[4756]: I0930 20:39:11.539548 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:39:11 crc kubenswrapper[4756]: I0930 20:39:11.539619 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 
20:39:13 crc kubenswrapper[4756]: I0930 20:39:13.428037 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:13 crc kubenswrapper[4756]: I0930 20:39:13.428128 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:13 crc kubenswrapper[4756]: I0930 20:39:13.476733 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:13 crc kubenswrapper[4756]: I0930 20:39:13.498055 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xvm8w" podStartSLOduration=7.993293844 podStartE2EDuration="10.498038285s" podCreationTimestamp="2025-09-30 20:39:03 +0000 UTC" firstStartedPulling="2025-09-30 20:39:04.131912503 +0000 UTC m=+4073.752845980" lastFinishedPulling="2025-09-30 20:39:06.636656944 +0000 UTC m=+4076.257590421" observedRunningTime="2025-09-30 20:39:07.183026717 +0000 UTC m=+4076.803960224" watchObservedRunningTime="2025-09-30 20:39:13.498038285 +0000 UTC m=+4083.118971762" Sep 30 20:39:14 crc kubenswrapper[4756]: I0930 20:39:14.253243 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:14 crc kubenswrapper[4756]: I0930 20:39:14.296816 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xvm8w"] Sep 30 20:39:16 crc kubenswrapper[4756]: I0930 20:39:16.230328 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xvm8w" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="registry-server" containerID="cri-o://7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168" gracePeriod=2 Sep 30 20:39:16 crc kubenswrapper[4756]: E0930 20:39:16.487314 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8562a11_1a99_4f26_ab1b_e8b18d5ed105.slice/crio-7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.101610 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.162040 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities\") pod \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.162114 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") pod \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.162217 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content\") pod \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\" (UID: \"b8562a11-1a99-4f26-ab1b-e8b18d5ed105\") " Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.163037 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities" (OuterVolumeSpecName: "utilities") pod "b8562a11-1a99-4f26-ab1b-e8b18d5ed105" (UID: "b8562a11-1a99-4f26-ab1b-e8b18d5ed105"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.168208 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5" (OuterVolumeSpecName: "kube-api-access-rq4w5") pod "b8562a11-1a99-4f26-ab1b-e8b18d5ed105" (UID: "b8562a11-1a99-4f26-ab1b-e8b18d5ed105"). InnerVolumeSpecName "kube-api-access-rq4w5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.223415 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8562a11-1a99-4f26-ab1b-e8b18d5ed105" (UID: "b8562a11-1a99-4f26-ab1b-e8b18d5ed105"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.237949 4756 generic.go:334] "Generic (PLEG): container finished" podID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerID="7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168" exitCode=0 Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.237999 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerDied","Data":"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168"} Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.238032 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xvm8w" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.238047 4756 scope.go:117] "RemoveContainer" containerID="7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.238035 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xvm8w" event={"ID":"b8562a11-1a99-4f26-ab1b-e8b18d5ed105","Type":"ContainerDied","Data":"530d6c17e84dcd0d48caf809ef587acae5d431ea3da4889a2ba1c772872a0d38"} Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.257053 4756 scope.go:117] "RemoveContainer" containerID="31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.264654 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.264747 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.264791 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq4w5\" (UniqueName: \"kubernetes.io/projected/b8562a11-1a99-4f26-ab1b-e8b18d5ed105-kube-api-access-rq4w5\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.277179 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xvm8w"] Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.285463 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xvm8w"] Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.359744 4756 scope.go:117] "RemoveContainer" containerID="d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.376563 4756 scope.go:117] "RemoveContainer" containerID="7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168" Sep 30 20:39:17 crc kubenswrapper[4756]: E0930 20:39:17.377171 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168\": container with ID starting with 7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168 not found: ID does not exist" containerID="7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.377206 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168"} err="failed to get container status \"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168\": rpc error: code = NotFound desc = could not find container \"7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168\": container with ID starting with 7c204618cacd13a09351521f228d5961cd565b59bef23dadfe3e6df419b07168 not found: ID does not exist" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.377230 4756 scope.go:117] "RemoveContainer" containerID="31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e" Sep 30 20:39:17 crc kubenswrapper[4756]: 
E0930 20:39:17.377783 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e\": container with ID starting with 31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e not found: ID does not exist" containerID="31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.377807 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e"} err="failed to get container status \"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e\": rpc error: code = NotFound desc = could not find container \"31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e\": container with ID starting with 31b7ae05a6920fc90648749bdd705976be98dcb35ae878447c574674b1e2e04e not found: ID does not exist" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.377824 4756 scope.go:117] "RemoveContainer" containerID="d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a" Sep 30 20:39:17 crc kubenswrapper[4756]: E0930 20:39:17.378417 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a\": container with ID starting with d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a not found: ID does not exist" containerID="d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a" Sep 30 20:39:17 crc kubenswrapper[4756]: I0930 20:39:17.378491 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a"} err="failed to get container status \"d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a\": rpc error: code = NotFound desc = could not find container \"d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a\": container with ID starting with d03f2b07548a73c5f32d128c806defdefb696d611d1e5629cf877f5a980c489a not found: ID does not exist" Sep 30 20:39:19 crc kubenswrapper[4756]: I0930 20:39:19.126358 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" path="/var/lib/kubelet/pods/b8562a11-1a99-4f26-ab1b-e8b18d5ed105/volumes" Sep 30 20:39:41 crc kubenswrapper[4756]: I0930 20:39:41.539162 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:39:41 crc kubenswrapper[4756]: I0930 20:39:41.540647 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.539352 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.540131 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.540212 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.541221 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.541330 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3" gracePeriod=600 Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.681896 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3" exitCode=0 Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.681946 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3"} Sep 30 20:40:11 crc kubenswrapper[4756]: I0930 20:40:11.681988 4756 scope.go:117] "RemoveContainer" containerID="20bb0b2310e2e3e6446619c00827bb6a031d09c2b13494eceb98d1db9a2bab91" Sep 30 20:40:12 crc kubenswrapper[4756]: I0930 20:40:12.697598 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"} Sep 30 20:42:11 crc kubenswrapper[4756]: I0930 20:42:11.538488 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:42:11 crc kubenswrapper[4756]: I0930 20:42:11.539143 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:42:41 crc kubenswrapper[4756]: I0930 20:42:41.538936 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:42:41 crc kubenswrapper[4756]: I0930 20:42:41.539907 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:43:11 crc kubenswrapper[4756]: I0930 20:43:11.539339 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:43:11 crc kubenswrapper[4756]: I0930 20:43:11.539925 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:43:11 crc kubenswrapper[4756]: I0930 20:43:11.539982 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:43:11 crc kubenswrapper[4756]: I0930 20:43:11.540658 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:43:11 crc kubenswrapper[4756]: I0930 20:43:11.540713 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" gracePeriod=600 Sep 30 20:43:11 crc kubenswrapper[4756]: E0930 20:43:11.676736 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:43:12 crc kubenswrapper[4756]: I0930 20:43:12.227999 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" exitCode=0 Sep 30 20:43:12 crc kubenswrapper[4756]: I0930 20:43:12.228065 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"} Sep 30 20:43:12 crc kubenswrapper[4756]: I0930 20:43:12.228130 4756 scope.go:117] "RemoveContainer" 
containerID="ab7bd9f73f16ccb05c53c8ea3af19b906d154b090c3b400cd42807c34474abf3" Sep 30 20:43:12 crc kubenswrapper[4756]: I0930 20:43:12.228839 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:43:12 crc kubenswrapper[4756]: E0930 20:43:12.229292 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:43:26 crc kubenswrapper[4756]: I0930 20:43:26.116733 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:43:26 crc kubenswrapper[4756]: E0930 20:43:26.118064 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:43:41 crc kubenswrapper[4756]: I0930 20:43:41.121065 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:43:41 crc kubenswrapper[4756]: E0930 20:43:41.121945 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:43:55 crc kubenswrapper[4756]: I0930 20:43:55.116200 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:43:55 crc kubenswrapper[4756]: E0930 20:43:55.117464 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:09 crc kubenswrapper[4756]: I0930 20:44:09.117031 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:44:09 crc kubenswrapper[4756]: E0930 20:44:09.117935 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:20 crc kubenswrapper[4756]: I0930 20:44:20.116042 4756 scope.go:117] "RemoveContainer" 
containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:44:20 crc kubenswrapper[4756]: E0930 20:44:20.117101 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:32 crc kubenswrapper[4756]: I0930 20:44:32.116227 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:44:32 crc kubenswrapper[4756]: E0930 20:44:32.117374 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:44 crc kubenswrapper[4756]: I0930 20:44:44.116076 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:44:44 crc kubenswrapper[4756]: E0930 20:44:44.117173 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.482412 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-vbkvb"] Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.487868 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-vbkvb"] Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.651092 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-87lbj"] Sep 30 20:44:50 crc kubenswrapper[4756]: E0930 20:44:50.651544 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="extract-content" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.651571 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="extract-content" Sep 30 20:44:50 crc kubenswrapper[4756]: E0930 20:44:50.651607 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="registry-server" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.651618 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="registry-server" Sep 30 20:44:50 crc kubenswrapper[4756]: E0930 20:44:50.651687 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="extract-utilities" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.651700 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" 
containerName="extract-utilities" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.651949 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8562a11-1a99-4f26-ab1b-e8b18d5ed105" containerName="registry-server" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.652635 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.655506 4756 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-ghtx2" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.655510 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.655799 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.655840 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.666172 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-87lbj"] Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.745993 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.746043 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.746073 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nrmx\" (UniqueName: \"kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.848191 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.848239 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.848261 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nrmx\" (UniqueName: \"kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " 
pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.849366 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.849544 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.869487 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nrmx\" (UniqueName: \"kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx\") pod \"crc-storage-crc-87lbj\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:50 crc kubenswrapper[4756]: I0930 20:44:50.970805 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:51 crc kubenswrapper[4756]: I0930 20:44:51.124654 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4b82604-c6b1-497d-a983-84d6543257a3" path="/var/lib/kubelet/pods/d4b82604-c6b1-497d-a983-84d6543257a3/volumes" Sep 30 20:44:51 crc kubenswrapper[4756]: I0930 20:44:51.367264 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-87lbj"] Sep 30 20:44:51 crc kubenswrapper[4756]: I0930 20:44:51.376513 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:44:52 crc kubenswrapper[4756]: I0930 20:44:52.073160 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-87lbj" event={"ID":"e850b364-34d8-4b5d-9d9e-9047f1abac9c","Type":"ContainerStarted","Data":"b9dd06f77f56de2b4ed7d4a83bf6c153de0764d290c5c2669e5edcb6b004f6a6"} Sep 30 20:44:53 crc kubenswrapper[4756]: I0930 20:44:53.081608 4756 generic.go:334] "Generic (PLEG): container finished" podID="e850b364-34d8-4b5d-9d9e-9047f1abac9c" containerID="386db2b3ec8b7c313b33834aa997f6d5d6878ef0731d50eee3a9a33cf3e2c1f0" exitCode=0 Sep 30 20:44:53 crc kubenswrapper[4756]: I0930 20:44:53.081715 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-87lbj" event={"ID":"e850b364-34d8-4b5d-9d9e-9047f1abac9c","Type":"ContainerDied","Data":"386db2b3ec8b7c313b33834aa997f6d5d6878ef0731d50eee3a9a33cf3e2c1f0"} Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.368787 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.494875 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage\") pod \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.495081 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nrmx\" (UniqueName: \"kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx\") pod \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.495106 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt\") pod \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\" (UID: \"e850b364-34d8-4b5d-9d9e-9047f1abac9c\") " Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.495232 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "e850b364-34d8-4b5d-9d9e-9047f1abac9c" (UID: "e850b364-34d8-4b5d-9d9e-9047f1abac9c"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.495380 4756 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e850b364-34d8-4b5d-9d9e-9047f1abac9c-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.499933 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx" (OuterVolumeSpecName: "kube-api-access-6nrmx") pod "e850b364-34d8-4b5d-9d9e-9047f1abac9c" (UID: "e850b364-34d8-4b5d-9d9e-9047f1abac9c"). InnerVolumeSpecName "kube-api-access-6nrmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.513001 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "e850b364-34d8-4b5d-9d9e-9047f1abac9c" (UID: "e850b364-34d8-4b5d-9d9e-9047f1abac9c"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.596485 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nrmx\" (UniqueName: \"kubernetes.io/projected/e850b364-34d8-4b5d-9d9e-9047f1abac9c-kube-api-access-6nrmx\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:54 crc kubenswrapper[4756]: I0930 20:44:54.596513 4756 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e850b364-34d8-4b5d-9d9e-9047f1abac9c-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:55 crc kubenswrapper[4756]: I0930 20:44:55.105485 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-87lbj" event={"ID":"e850b364-34d8-4b5d-9d9e-9047f1abac9c","Type":"ContainerDied","Data":"b9dd06f77f56de2b4ed7d4a83bf6c153de0764d290c5c2669e5edcb6b004f6a6"} Sep 30 20:44:55 crc kubenswrapper[4756]: I0930 20:44:55.105534 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9dd06f77f56de2b4ed7d4a83bf6c153de0764d290c5c2669e5edcb6b004f6a6" Sep 30 20:44:55 crc kubenswrapper[4756]: I0930 20:44:55.105593 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-87lbj" Sep 30 20:44:55 crc kubenswrapper[4756]: I0930 20:44:55.116577 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:44:55 crc kubenswrapper[4756]: E0930 20:44:55.117277 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.776557 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-87lbj"] Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.784268 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-87lbj"] Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.971688 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-d8fdm"] Sep 30 20:44:56 crc kubenswrapper[4756]: E0930 20:44:56.972275 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e850b364-34d8-4b5d-9d9e-9047f1abac9c" containerName="storage" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.972369 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e850b364-34d8-4b5d-9d9e-9047f1abac9c" containerName="storage" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.972758 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e850b364-34d8-4b5d-9d9e-9047f1abac9c" containerName="storage" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.973595 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.975646 4756 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-ghtx2" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.976866 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.977066 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.977212 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 30 20:44:56 crc kubenswrapper[4756]: I0930 20:44:56.984977 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-d8fdm"] Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.031336 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5dwd\" (UniqueName: \"kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.031424 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.031552 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.130298 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e850b364-34d8-4b5d-9d9e-9047f1abac9c" path="/var/lib/kubelet/pods/e850b364-34d8-4b5d-9d9e-9047f1abac9c/volumes" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.133275 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5dwd\" (UniqueName: \"kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.133313 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.133443 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.133805 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.134484 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.152429 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5dwd\" (UniqueName: \"kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd\") pod \"crc-storage-crc-d8fdm\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.299347 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:44:57 crc kubenswrapper[4756]: I0930 20:44:57.580306 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-d8fdm"] Sep 30 20:44:58 crc kubenswrapper[4756]: I0930 20:44:58.132458 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8fdm" event={"ID":"a732c66c-de2d-4bad-b039-8b776aa478fe","Type":"ContainerStarted","Data":"51bffcf6d690be41d6d945d70ba28856e98be5472492adfdbf41d061f6105677"} Sep 30 20:44:59 crc kubenswrapper[4756]: I0930 20:44:59.140984 4756 generic.go:334] "Generic (PLEG): container finished" podID="a732c66c-de2d-4bad-b039-8b776aa478fe" containerID="0dd3564c1d7e93078d229af3c91e62afc4a36bf7fa7d1247304632164dbd1419" exitCode=0 Sep 30 20:44:59 crc kubenswrapper[4756]: I0930 20:44:59.141038 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8fdm" event={"ID":"a732c66c-de2d-4bad-b039-8b776aa478fe","Type":"ContainerDied","Data":"0dd3564c1d7e93078d229af3c91e62afc4a36bf7fa7d1247304632164dbd1419"} Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.166116 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx"] Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.168078 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.171666 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.171935 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.179337 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx"] Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.282459 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.282903 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.282926 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wmr\" (UniqueName: \"kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.384165 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.384277 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.384306 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wmr\" (UniqueName: \"kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.385839 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume\") pod 
\"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.397075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.410959 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wmr\" (UniqueName: \"kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr\") pod \"collect-profiles-29321085-8jldx\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.492611 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.587078 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.687447 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt\") pod \"a732c66c-de2d-4bad-b039-8b776aa478fe\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.687897 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage\") pod \"a732c66c-de2d-4bad-b039-8b776aa478fe\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.687940 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5dwd\" (UniqueName: \"kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd\") pod \"a732c66c-de2d-4bad-b039-8b776aa478fe\" (UID: \"a732c66c-de2d-4bad-b039-8b776aa478fe\") " Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.687837 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "a732c66c-de2d-4bad-b039-8b776aa478fe" (UID: "a732c66c-de2d-4bad-b039-8b776aa478fe"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.692063 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd" (OuterVolumeSpecName: "kube-api-access-h5dwd") pod "a732c66c-de2d-4bad-b039-8b776aa478fe" (UID: "a732c66c-de2d-4bad-b039-8b776aa478fe"). InnerVolumeSpecName "kube-api-access-h5dwd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.704974 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "a732c66c-de2d-4bad-b039-8b776aa478fe" (UID: "a732c66c-de2d-4bad-b039-8b776aa478fe"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.790239 4756 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/a732c66c-de2d-4bad-b039-8b776aa478fe-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.790292 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5dwd\" (UniqueName: \"kubernetes.io/projected/a732c66c-de2d-4bad-b039-8b776aa478fe-kube-api-access-h5dwd\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.790310 4756 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/a732c66c-de2d-4bad-b039-8b776aa478fe-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:00 crc kubenswrapper[4756]: I0930 20:45:00.915492 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx"] Sep 30 20:45:00 crc kubenswrapper[4756]: W0930 20:45:00.921359 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedc6db7a_b983_49e3_9314_c3e000ad156a.slice/crio-206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61 WatchSource:0}: Error finding container 206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61: Status 404 returned error can't find the container with id 206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61 Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.181738 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" event={"ID":"edc6db7a-b983-49e3-9314-c3e000ad156a","Type":"ContainerStarted","Data":"c22b245689d9a9357da35a62f843ca5576c6d0320faf2eecf0548a49b38cad3c"} Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.181826 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" event={"ID":"edc6db7a-b983-49e3-9314-c3e000ad156a","Type":"ContainerStarted","Data":"206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61"} Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.193082 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8fdm" event={"ID":"a732c66c-de2d-4bad-b039-8b776aa478fe","Type":"ContainerDied","Data":"51bffcf6d690be41d6d945d70ba28856e98be5472492adfdbf41d061f6105677"} Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.193252 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51bffcf6d690be41d6d945d70ba28856e98be5472492adfdbf41d061f6105677" Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.193174 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-d8fdm" Sep 30 20:45:01 crc kubenswrapper[4756]: I0930 20:45:01.199227 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" podStartSLOduration=1.199207982 podStartE2EDuration="1.199207982s" podCreationTimestamp="2025-09-30 20:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:45:01.19721909 +0000 UTC m=+4430.818152567" watchObservedRunningTime="2025-09-30 20:45:01.199207982 +0000 UTC m=+4430.820141449" Sep 30 20:45:02 crc kubenswrapper[4756]: I0930 20:45:02.205180 4756 generic.go:334] "Generic (PLEG): container finished" podID="edc6db7a-b983-49e3-9314-c3e000ad156a" containerID="c22b245689d9a9357da35a62f843ca5576c6d0320faf2eecf0548a49b38cad3c" exitCode=0 Sep 30 20:45:02 crc kubenswrapper[4756]: I0930 20:45:02.205782 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" event={"ID":"edc6db7a-b983-49e3-9314-c3e000ad156a","Type":"ContainerDied","Data":"c22b245689d9a9357da35a62f843ca5576c6d0320faf2eecf0548a49b38cad3c"} Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.505500 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.654241 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume\") pod \"edc6db7a-b983-49e3-9314-c3e000ad156a\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.654289 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume\") pod \"edc6db7a-b983-49e3-9314-c3e000ad156a\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.654367 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98wmr\" (UniqueName: \"kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr\") pod \"edc6db7a-b983-49e3-9314-c3e000ad156a\" (UID: \"edc6db7a-b983-49e3-9314-c3e000ad156a\") " Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.655495 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume" (OuterVolumeSpecName: "config-volume") pod "edc6db7a-b983-49e3-9314-c3e000ad156a" (UID: "edc6db7a-b983-49e3-9314-c3e000ad156a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.659600 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "edc6db7a-b983-49e3-9314-c3e000ad156a" (UID: "edc6db7a-b983-49e3-9314-c3e000ad156a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.659620 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr" (OuterVolumeSpecName: "kube-api-access-98wmr") pod "edc6db7a-b983-49e3-9314-c3e000ad156a" (UID: "edc6db7a-b983-49e3-9314-c3e000ad156a"). InnerVolumeSpecName "kube-api-access-98wmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.755974 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edc6db7a-b983-49e3-9314-c3e000ad156a-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.756511 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edc6db7a-b983-49e3-9314-c3e000ad156a-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4756]: I0930 20:45:03.756581 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98wmr\" (UniqueName: \"kubernetes.io/projected/edc6db7a-b983-49e3-9314-c3e000ad156a-kube-api-access-98wmr\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:04 crc kubenswrapper[4756]: I0930 20:45:04.224024 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" event={"ID":"edc6db7a-b983-49e3-9314-c3e000ad156a","Type":"ContainerDied","Data":"206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61"} Sep 30 20:45:04 crc kubenswrapper[4756]: I0930 20:45:04.224070 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-8jldx" Sep 30 20:45:04 crc kubenswrapper[4756]: I0930 20:45:04.224076 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="206f66fb6371e2650b54f6999ef0e8e4d9bce2a9edaed1837ad46397c785ca61" Sep 30 20:45:04 crc kubenswrapper[4756]: I0930 20:45:04.289432 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"] Sep 30 20:45:04 crc kubenswrapper[4756]: I0930 20:45:04.296186 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-ngp2x"] Sep 30 20:45:05 crc kubenswrapper[4756]: I0930 20:45:05.125796 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a" path="/var/lib/kubelet/pods/bc9ed98f-bdbf-44d8-a4f4-fa4c00e7671a/volumes" Sep 30 20:45:09 crc kubenswrapper[4756]: I0930 20:45:09.116571 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:45:09 crc kubenswrapper[4756]: E0930 20:45:09.117110 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:45:19 crc kubenswrapper[4756]: I0930 20:45:19.798170 4756 scope.go:117] "RemoveContainer" containerID="e6e95b482daf14ba551431879790adb7bea9d18283bf491d25bbe0c6bb308353" Sep 30 20:45:19 crc kubenswrapper[4756]: I0930 20:45:19.830048 4756 scope.go:117] "RemoveContainer" containerID="815009d102e9e8828f9732d699ff140b56c62727f10ef37a42e9d85ab48860b4" Sep 30 20:45:21 crc kubenswrapper[4756]: I0930 20:45:21.119877 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:45:21 crc kubenswrapper[4756]: E0930 20:45:21.120375 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:45:36 crc kubenswrapper[4756]: I0930 20:45:36.116198 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:45:36 crc kubenswrapper[4756]: E0930 20:45:36.116977 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:45:51 crc kubenswrapper[4756]: I0930 20:45:51.119385 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:45:51 crc kubenswrapper[4756]: 
Sep 30 20:45:51 crc kubenswrapper[4756]: E0930 20:45:51.120133 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:46:04 crc kubenswrapper[4756]: I0930 20:46:04.116683 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"
Sep 30 20:46:04 crc kubenswrapper[4756]: E0930 20:46:04.117973 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:46:18 crc kubenswrapper[4756]: I0930 20:46:18.116205 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"
Sep 30 20:46:18 crc kubenswrapper[4756]: E0930 20:46:18.116917 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:46:29 crc kubenswrapper[4756]: I0930 20:46:29.116033 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"
Sep 30 20:46:29 crc kubenswrapper[4756]: E0930 20:46:29.116680 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:46:43 crc kubenswrapper[4756]: I0930 20:46:43.116489 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"
Sep 30 20:46:43 crc kubenswrapper[4756]: E0930 20:46:43.117118 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:46:55 crc kubenswrapper[4756]: I0930 20:46:55.116896 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99"
Sep 30 20:46:55 crc kubenswrapper[4756]: E0930 20:46:55.118122 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.083868 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"]
Sep 30 20:47:06 crc kubenswrapper[4756]: E0930 20:47:06.084704 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edc6db7a-b983-49e3-9314-c3e000ad156a" containerName="collect-profiles"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.084717 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="edc6db7a-b983-49e3-9314-c3e000ad156a" containerName="collect-profiles"
Sep 30 20:47:06 crc kubenswrapper[4756]: E0930 20:47:06.084740 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a732c66c-de2d-4bad-b039-8b776aa478fe" containerName="storage"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.084747 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a732c66c-de2d-4bad-b039-8b776aa478fe" containerName="storage"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.084893 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a732c66c-de2d-4bad-b039-8b776aa478fe" containerName="storage"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.084908 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="edc6db7a-b983-49e3-9314-c3e000ad156a" containerName="collect-profiles"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.085903 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-527mz"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.100899 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"]
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.153662 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsv8t\" (UniqueName: \"kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.153723 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz"
Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.153904 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-catalog-content\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz"
" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.254605 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsv8t\" (UniqueName: \"kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.254624 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.255200 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.255346 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-catalog-content\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.279557 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsv8t\" (UniqueName: \"kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t\") pod \"redhat-marketplace-527mz\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.418422 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:06 crc kubenswrapper[4756]: I0930 20:47:06.830848 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"] Sep 30 20:47:07 crc kubenswrapper[4756]: I0930 20:47:07.218075 4756 generic.go:334] "Generic (PLEG): container finished" podID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerID="80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658" exitCode=0 Sep 30 20:47:07 crc kubenswrapper[4756]: I0930 20:47:07.218180 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerDied","Data":"80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658"} Sep 30 20:47:07 crc kubenswrapper[4756]: I0930 20:47:07.218438 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerStarted","Data":"26ca6ad1e6b72bb481079b57ae003d326ee3d472ed3ed7a9f01715317ef16f88"} Sep 30 20:47:09 crc kubenswrapper[4756]: I0930 20:47:09.230710 4756 generic.go:334] "Generic (PLEG): container finished" podID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerID="10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5" exitCode=0 Sep 30 20:47:09 crc kubenswrapper[4756]: I0930 20:47:09.230793 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerDied","Data":"10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5"} Sep 30 20:47:10 crc kubenswrapper[4756]: I0930 20:47:10.116199 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:47:10 crc kubenswrapper[4756]: E0930 20:47:10.116727 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:47:10 crc kubenswrapper[4756]: I0930 20:47:10.241457 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerStarted","Data":"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0"} Sep 30 20:47:10 crc kubenswrapper[4756]: I0930 20:47:10.259130 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-527mz" podStartSLOduration=1.8017990400000001 podStartE2EDuration="4.259105894s" podCreationTimestamp="2025-09-30 20:47:06 +0000 UTC" firstStartedPulling="2025-09-30 20:47:07.219554111 +0000 UTC m=+4556.840487588" lastFinishedPulling="2025-09-30 20:47:09.676860965 +0000 UTC m=+4559.297794442" observedRunningTime="2025-09-30 20:47:10.257708598 +0000 UTC m=+4559.878642085" watchObservedRunningTime="2025-09-30 20:47:10.259105894 +0000 UTC m=+4559.880039371" Sep 30 20:47:16 crc kubenswrapper[4756]: I0930 20:47:16.418860 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 
20:47:16 crc kubenswrapper[4756]: I0930 20:47:16.419643 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:16 crc kubenswrapper[4756]: I0930 20:47:16.458915 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:17 crc kubenswrapper[4756]: I0930 20:47:17.328685 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:17 crc kubenswrapper[4756]: I0930 20:47:17.376885 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"] Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.303844 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-527mz" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="registry-server" containerID="cri-o://5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0" gracePeriod=2 Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.680704 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.861418 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-catalog-content\") pod \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.861558 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities\") pod \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.861672 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsv8t\" (UniqueName: \"kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t\") pod \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\" (UID: \"0ec74fc3-d99f-4341-bc73-1c9502982c1a\") " Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.862454 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities" (OuterVolumeSpecName: "utilities") pod "0ec74fc3-d99f-4341-bc73-1c9502982c1a" (UID: "0ec74fc3-d99f-4341-bc73-1c9502982c1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.867284 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t" (OuterVolumeSpecName: "kube-api-access-fsv8t") pod "0ec74fc3-d99f-4341-bc73-1c9502982c1a" (UID: "0ec74fc3-d99f-4341-bc73-1c9502982c1a"). InnerVolumeSpecName "kube-api-access-fsv8t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.875165 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ec74fc3-d99f-4341-bc73-1c9502982c1a" (UID: "0ec74fc3-d99f-4341-bc73-1c9502982c1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.963634 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.963950 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ec74fc3-d99f-4341-bc73-1c9502982c1a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:19 crc kubenswrapper[4756]: I0930 20:47:19.963961 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsv8t\" (UniqueName: \"kubernetes.io/projected/0ec74fc3-d99f-4341-bc73-1c9502982c1a-kube-api-access-fsv8t\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.311648 4756 generic.go:334] "Generic (PLEG): container finished" podID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerID="5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0" exitCode=0 Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.311707 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerDied","Data":"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0"} Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.311751 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-527mz" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.311778 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-527mz" event={"ID":"0ec74fc3-d99f-4341-bc73-1c9502982c1a","Type":"ContainerDied","Data":"26ca6ad1e6b72bb481079b57ae003d326ee3d472ed3ed7a9f01715317ef16f88"} Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.311806 4756 scope.go:117] "RemoveContainer" containerID="5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.332318 4756 scope.go:117] "RemoveContainer" containerID="10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.349654 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"] Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.356228 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-527mz"] Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.364722 4756 scope.go:117] "RemoveContainer" containerID="80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.386992 4756 scope.go:117] "RemoveContainer" containerID="5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0" Sep 30 20:47:20 crc kubenswrapper[4756]: E0930 20:47:20.387540 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0\": container with ID starting with 5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0 not found: ID does not exist" containerID="5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.387580 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0"} err="failed to get container status \"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0\": rpc error: code = NotFound desc = could not find container \"5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0\": container with ID starting with 5e57e8a05cb7425649a6d854c22dd6d3912544d33661ec9e224d871e95dc26e0 not found: ID does not exist" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.387607 4756 scope.go:117] "RemoveContainer" containerID="10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5" Sep 30 20:47:20 crc kubenswrapper[4756]: E0930 20:47:20.387860 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5\": container with ID starting with 10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5 not found: ID does not exist" containerID="10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.387965 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5"} err="failed to get container status \"10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5\": rpc error: code = NotFound desc = could not find 
container \"10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5\": container with ID starting with 10a438feb8397706c735cc23ddd0ad471ce42b1ae399f9f8d9b47d4bf41734e5 not found: ID does not exist" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.388047 4756 scope.go:117] "RemoveContainer" containerID="80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658" Sep 30 20:47:20 crc kubenswrapper[4756]: E0930 20:47:20.388458 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658\": container with ID starting with 80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658 not found: ID does not exist" containerID="80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658" Sep 30 20:47:20 crc kubenswrapper[4756]: I0930 20:47:20.388488 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658"} err="failed to get container status \"80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658\": rpc error: code = NotFound desc = could not find container \"80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658\": container with ID starting with 80bbdd2052810c57a4cdc7c98b0ffcb51c1fae571ffff6e686853356f2e0f658 not found: ID does not exist" Sep 30 20:47:21 crc kubenswrapper[4756]: I0930 20:47:21.126617 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" path="/var/lib/kubelet/pods/0ec74fc3-d99f-4341-bc73-1c9502982c1a/volumes" Sep 30 20:47:22 crc kubenswrapper[4756]: I0930 20:47:22.115915 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:47:22 crc kubenswrapper[4756]: E0930 20:47:22.116460 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:47:33 crc kubenswrapper[4756]: I0930 20:47:33.117048 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:47:33 crc kubenswrapper[4756]: E0930 20:47:33.119287 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:47:46 crc kubenswrapper[4756]: I0930 20:47:46.116652 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:47:46 crc kubenswrapper[4756]: E0930 20:47:46.117904 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:47:58 crc kubenswrapper[4756]: I0930 20:47:58.115761 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:47:58 crc kubenswrapper[4756]: E0930 20:47:58.116585 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.176212 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:04 crc kubenswrapper[4756]: E0930 20:48:04.176952 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="extract-utilities" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.176972 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="extract-utilities" Sep 30 20:48:04 crc kubenswrapper[4756]: E0930 20:48:04.176994 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="extract-content" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.177002 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="extract-content" Sep 30 20:48:04 crc kubenswrapper[4756]: E0930 20:48:04.177024 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="registry-server" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.177032 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="registry-server" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.177228 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec74fc3-d99f-4341-bc73-1c9502982c1a" containerName="registry-server" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.178120 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.180813 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.181945 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-llfxz" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.182017 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.182142 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.188333 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.201108 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.288840 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv7sw\" (UniqueName: \"kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.288914 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.288936 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.390841 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.390901 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.391020 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv7sw\" (UniqueName: \"kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.391924 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.391943 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.427786 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv7sw\" (UniqueName: \"kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw\") pod \"dnsmasq-dns-5d7b5456f5-px82p\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.483173 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.489576 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.497889 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.501093 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.593379 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvj4x\" (UniqueName: \"kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.593477 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.593507 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.694537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.694898 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: 
\"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.694972 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvj4x\" (UniqueName: \"kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.696076 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.696663 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.716554 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvj4x\" (UniqueName: \"kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x\") pod \"dnsmasq-dns-98ddfc8f-4nbjr\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:04 crc kubenswrapper[4756]: I0930 20:48:04.813293 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.072182 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.114577 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:05 crc kubenswrapper[4756]: W0930 20:48:05.120173 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d1685f0_5d42_4b88_9804_7f77e5a30cff.slice/crio-3c0d42b34017fe1f0513c795b29443a60da2f0365849e38cd2a4b65e53e515ab WatchSource:0}: Error finding container 3c0d42b34017fe1f0513c795b29443a60da2f0365849e38cd2a4b65e53e515ab: Status 404 returned error can't find the container with id 3c0d42b34017fe1f0513c795b29443a60da2f0365849e38cd2a4b65e53e515ab Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.352258 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.353875 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.357898 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.357997 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lvsbp" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.358326 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.366522 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.366622 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.379282 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.414969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415059 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415132 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415446 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415553 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415776 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415847 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.415882 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm44g\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517275 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517354 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517458 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517497 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517542 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517581 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517613 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517637 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm44g\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.517664 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.519363 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.519700 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.520577 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.520676 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.524513 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.524604 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.524980 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.525150 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/13580d31e89d9734fbf1a64fe8163230ccfdaa4e77b367d3301304b5672e812d/globalmount\"" pod="openstack/rabbitmq-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.524988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.541252 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm44g\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.557765 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.699521 4756 generic.go:334] "Generic (PLEG): container finished" podID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerID="0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7" exitCode=0
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.699784 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" event={"ID":"2d1685f0-5d42-4b88-9804-7f77e5a30cff","Type":"ContainerDied","Data":"0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7"}
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.699888 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" event={"ID":"2d1685f0-5d42-4b88-9804-7f77e5a30cff","Type":"ContainerStarted","Data":"3c0d42b34017fe1f0513c795b29443a60da2f0365849e38cd2a4b65e53e515ab"}
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.702485 4756 generic.go:334] "Generic (PLEG): container finished" podID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerID="0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1" exitCode=0
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.702534 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" event={"ID":"09fbe05a-857d-48dc-9e31-faadf828e6eb","Type":"ContainerDied","Data":"0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1"}
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.702714 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" event={"ID":"09fbe05a-857d-48dc-9e31-faadf828e6eb","Type":"ContainerStarted","Data":"18438bd35507e891fe08d754938be4f2dec263c786d2308a97f3774d85a3981a"}
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.724795 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.726331 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.734464 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-g6qr8"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.735039 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.735306 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.735549 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.735762 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.781284 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.799753 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.934370 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m94w\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.934445 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.934485 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.934546 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.935846 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0"
kubenswrapper[4756]: I0930 20:48:05.935991 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.936026 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.936179 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:05 crc kubenswrapper[4756]: I0930 20:48:05.936201 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037428 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037527 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037552 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037590 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m94w\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037613 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc 
kubenswrapper[4756]: I0930 20:48:06.037638 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037682 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037714 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.037740 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.041355 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.041492 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.041490 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.042290 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.043711 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.046559 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability 
not set. Skipping MountDevice... Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.046592 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/129c38121bc8e03a953681f31281646cec7fd324f44e599ccb7820f5cbe7b9f0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.048069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.048127 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.065164 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m94w\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.105231 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.132182 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.286465 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.580458 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:48:06 crc kubenswrapper[4756]: W0930 20:48:06.582086 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2fc60bc_0149_497d_85cf_d4614f8b33ad.slice/crio-1168e3a048bf7752b5bc4b5ee1c573f865dd0048320ecb7237fedab8b1bc5da1 WatchSource:0}: Error finding container 1168e3a048bf7752b5bc4b5ee1c573f865dd0048320ecb7237fedab8b1bc5da1: Status 404 returned error can't find the container with id 1168e3a048bf7752b5bc4b5ee1c573f865dd0048320ecb7237fedab8b1bc5da1 Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.733548 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" event={"ID":"09fbe05a-857d-48dc-9e31-faadf828e6eb","Type":"ContainerStarted","Data":"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f"} Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.734275 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.736908 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerStarted","Data":"5c58a315a662fbae14da76647f46cbb38f9f1a55d89bf147949464d58216b893"} Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.738704 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerStarted","Data":"1168e3a048bf7752b5bc4b5ee1c573f865dd0048320ecb7237fedab8b1bc5da1"} Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.741271 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" event={"ID":"2d1685f0-5d42-4b88-9804-7f77e5a30cff","Type":"ContainerStarted","Data":"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636"} Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.741707 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.760849 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" podStartSLOduration=2.7608232150000003 podStartE2EDuration="2.760823215s" podCreationTimestamp="2025-09-30 20:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:06.753896264 +0000 UTC m=+4616.374829761" watchObservedRunningTime="2025-09-30 20:48:06.760823215 +0000 UTC m=+4616.381756682" Sep 30 20:48:06 crc kubenswrapper[4756]: I0930 20:48:06.773628 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" podStartSLOduration=2.773609218 podStartE2EDuration="2.773609218s" podCreationTimestamp="2025-09-30 20:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:06.772627993 +0000 UTC 
m=+4616.393561500" watchObservedRunningTime="2025-09-30 20:48:06.773609218 +0000 UTC m=+4616.394542695" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.056460 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.058335 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.061554 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.061935 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.062170 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.062300 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9v5rj" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.062415 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.075786 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.083794 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.144562 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.145646 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.147737 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-2kwhh" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.148201 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165511 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165589 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kolla-config\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165620 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165650 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-secrets\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165691 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165719 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chzph\" (UniqueName: \"kubernetes.io/projected/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kube-api-access-chzph\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165747 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165794 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " 
pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.165832 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-default\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.183533 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.272338 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-default\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.272743 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-kolla-config\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.272848 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.272954 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhs4k\" (UniqueName: \"kubernetes.io/projected/1a9a3913-4c34-4606-817a-31686e00be78-kube-api-access-qhs4k\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.273060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kolla-config\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.273187 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274001 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-secrets\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274131 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " 
pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274254 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chzph\" (UniqueName: \"kubernetes.io/projected/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kube-api-access-chzph\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274371 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.273896 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-default\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274637 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-config-data\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.274688 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.275888 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.276341 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.276434 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kolla-config\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.281343 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.283204 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.284566 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.284602 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6fb5f618ebc135ba48afce4f2170a5a798bcdd943a9fc18d1df6b8b591aa39c5/globalmount\"" pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.298928 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-secrets\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.304055 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chzph\" (UniqueName: \"kubernetes.io/projected/c86195f6-b0ce-4e59-81e8-25d5a978d2b5-kube-api-access-chzph\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.380323 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-config-data\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.380469 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-kolla-config\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.380498 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhs4k\" (UniqueName: \"kubernetes.io/projected/1a9a3913-4c34-4606-817a-31686e00be78-kube-api-access-qhs4k\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.381609 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-config-data\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.382063 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a9a3913-4c34-4606-817a-31686e00be78-kolla-config\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.455187 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhs4k\" (UniqueName: \"kubernetes.io/projected/1a9a3913-4c34-4606-817a-31686e00be78-kube-api-access-qhs4k\") pod \"memcached-0\" (UID: \"1a9a3913-4c34-4606-817a-31686e00be78\") " pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.478807 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.689504 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2e54f57d-7aeb-4386-b304-54a0b0885a1d\") pod \"openstack-galera-0\" (UID: \"c86195f6-b0ce-4e59-81e8-25d5a978d2b5\") " pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.729566 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:48:07 crc kubenswrapper[4756]: I0930 20:48:07.996122 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.267990 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.493285 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.495380 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.499654 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.499806 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-zfc5h" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.500620 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.503468 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.517149 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.613831 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.613933 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phrqb\" (UniqueName: \"kubernetes.io/projected/be947271-1bee-445b-be89-945f5ffe659b-kube-api-access-phrqb\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614030 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614073 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614320 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-02725226-29b6-47be-89a5-119081eb5908\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-02725226-29b6-47be-89a5-119081eb5908\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614663 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614738 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/be947271-1bee-445b-be89-945f5ffe659b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.614767 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: W0930 20:48:08.655777 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc86195f6_b0ce_4e59_81e8_25d5a978d2b5.slice/crio-fa9d528a204500871996bb7f552e2c09ce8df9ee5b9ad394cac66c2a8ba416de WatchSource:0}: Error finding container fa9d528a204500871996bb7f552e2c09ce8df9ee5b9ad394cac66c2a8ba416de: Status 404 returned error can't find the container with id fa9d528a204500871996bb7f552e2c09ce8df9ee5b9ad394cac66c2a8ba416de Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716156 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-02725226-29b6-47be-89a5-119081eb5908\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-02725226-29b6-47be-89a5-119081eb5908\") pod \"openstack-cell1-galera-0\" (UID: 
\"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716220 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716253 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/be947271-1bee-445b-be89-945f5ffe659b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716272 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716305 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716328 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phrqb\" (UniqueName: \"kubernetes.io/projected/be947271-1bee-445b-be89-945f5ffe659b-kube-api-access-phrqb\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716373 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716415 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.716445 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.717393 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/be947271-1bee-445b-be89-945f5ffe659b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 
20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.718119 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.718153 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.718897 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/be947271-1bee-445b-be89-945f5ffe659b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.721389 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.721493 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-02725226-29b6-47be-89a5-119081eb5908\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-02725226-29b6-47be-89a5-119081eb5908\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cce8c629bf6ae314bb408f27fef7ef71a4c1517c1af0c2ed9df12fddbf317226/globalmount\"" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.722516 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.722863 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.723211 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/be947271-1bee-445b-be89-945f5ffe659b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.734755 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phrqb\" (UniqueName: \"kubernetes.io/projected/be947271-1bee-445b-be89-945f5ffe659b-kube-api-access-phrqb\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.759184 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/openstack-galera-0" event={"ID":"c86195f6-b0ce-4e59-81e8-25d5a978d2b5","Type":"ContainerStarted","Data":"fa9d528a204500871996bb7f552e2c09ce8df9ee5b9ad394cac66c2a8ba416de"} Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.761466 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerStarted","Data":"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658"} Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.763043 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerStarted","Data":"87f9f533f36ac9b2fb4f15f3577e57d6287652abf8ce2563784951e51c58af65"} Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.764887 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1a9a3913-4c34-4606-817a-31686e00be78","Type":"ContainerStarted","Data":"4286807ae410e64a00cd491f3fef2596dacec151cb0c94ed6b27e9d8768d1afb"} Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.764919 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1a9a3913-4c34-4606-817a-31686e00be78","Type":"ContainerStarted","Data":"01f93403059e78cfea807ad7b87589099f9be32e95936230c467b3adb274b9a0"} Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.765271 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.807029 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.806998474 podStartE2EDuration="1.806998474s" podCreationTimestamp="2025-09-30 20:48:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:08.803717128 +0000 UTC m=+4618.424650625" watchObservedRunningTime="2025-09-30 20:48:08.806998474 +0000 UTC m=+4618.427931951" Sep 30 20:48:08 crc kubenswrapper[4756]: I0930 20:48:08.867741 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-02725226-29b6-47be-89a5-119081eb5908\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-02725226-29b6-47be-89a5-119081eb5908\") pod \"openstack-cell1-galera-0\" (UID: \"be947271-1bee-445b-be89-945f5ffe659b\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:09 crc kubenswrapper[4756]: I0930 20:48:09.125721 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:09 crc kubenswrapper[4756]: I0930 20:48:09.403129 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:48:09 crc kubenswrapper[4756]: W0930 20:48:09.412524 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe947271_1bee_445b_be89_945f5ffe659b.slice/crio-8534778de9ce4402f488618b2b65eba0e6ae4d81eda8f7f09a3c25e5da1287cd WatchSource:0}: Error finding container 8534778de9ce4402f488618b2b65eba0e6ae4d81eda8f7f09a3c25e5da1287cd: Status 404 returned error can't find the container with id 8534778de9ce4402f488618b2b65eba0e6ae4d81eda8f7f09a3c25e5da1287cd Sep 30 20:48:09 crc kubenswrapper[4756]: I0930 20:48:09.776507 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"be947271-1bee-445b-be89-945f5ffe659b","Type":"ContainerStarted","Data":"b413f7112677fe3406651a603d96165d8ff005e6eb190337ba0ba1cd3fb907f3"} Sep 30 20:48:09 crc kubenswrapper[4756]: I0930 20:48:09.777130 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"be947271-1bee-445b-be89-945f5ffe659b","Type":"ContainerStarted","Data":"8534778de9ce4402f488618b2b65eba0e6ae4d81eda8f7f09a3c25e5da1287cd"} Sep 30 20:48:09 crc kubenswrapper[4756]: I0930 20:48:09.779331 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c86195f6-b0ce-4e59-81e8-25d5a978d2b5","Type":"ContainerStarted","Data":"0c63d9572759201733e322987da85127e29842f1abdf06db3464458a7627c27e"} Sep 30 20:48:12 crc kubenswrapper[4756]: I0930 20:48:12.116150 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:48:12 crc kubenswrapper[4756]: I0930 20:48:12.805830 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831"} Sep 30 20:48:13 crc kubenswrapper[4756]: I0930 20:48:13.815357 4756 generic.go:334] "Generic (PLEG): container finished" podID="c86195f6-b0ce-4e59-81e8-25d5a978d2b5" containerID="0c63d9572759201733e322987da85127e29842f1abdf06db3464458a7627c27e" exitCode=0 Sep 30 20:48:13 crc kubenswrapper[4756]: I0930 20:48:13.815441 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c86195f6-b0ce-4e59-81e8-25d5a978d2b5","Type":"ContainerDied","Data":"0c63d9572759201733e322987da85127e29842f1abdf06db3464458a7627c27e"} Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.499635 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.815701 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.824421 4756 generic.go:334] "Generic (PLEG): container finished" podID="be947271-1bee-445b-be89-945f5ffe659b" containerID="b413f7112677fe3406651a603d96165d8ff005e6eb190337ba0ba1cd3fb907f3" exitCode=0 Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.824498 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"be947271-1bee-445b-be89-945f5ffe659b","Type":"ContainerDied","Data":"b413f7112677fe3406651a603d96165d8ff005e6eb190337ba0ba1cd3fb907f3"} Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.827028 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c86195f6-b0ce-4e59-81e8-25d5a978d2b5","Type":"ContainerStarted","Data":"e83941b2546ff28da1581d6e717b65753ac194bbf7307760d3ed9ab9e2264372"} Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.875208 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.875411 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="dnsmasq-dns" containerID="cri-o://372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636" gracePeriod=10 Sep 30 20:48:14 crc kubenswrapper[4756]: I0930 20:48:14.878407 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.878372348 podStartE2EDuration="8.878372348s" podCreationTimestamp="2025-09-30 20:48:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:14.870154763 +0000 UTC m=+4624.491088240" watchObservedRunningTime="2025-09-30 20:48:14.878372348 +0000 UTC m=+4624.499305825" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.319833 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.361289 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc\") pod \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.361443 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config\") pod \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.361775 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv7sw\" (UniqueName: \"kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw\") pod \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\" (UID: \"2d1685f0-5d42-4b88-9804-7f77e5a30cff\") " Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.379800 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw" (OuterVolumeSpecName: "kube-api-access-cv7sw") pod "2d1685f0-5d42-4b88-9804-7f77e5a30cff" (UID: "2d1685f0-5d42-4b88-9804-7f77e5a30cff"). InnerVolumeSpecName "kube-api-access-cv7sw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.411723 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config" (OuterVolumeSpecName: "config") pod "2d1685f0-5d42-4b88-9804-7f77e5a30cff" (UID: "2d1685f0-5d42-4b88-9804-7f77e5a30cff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.412938 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d1685f0-5d42-4b88-9804-7f77e5a30cff" (UID: "2d1685f0-5d42-4b88-9804-7f77e5a30cff"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.464720 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv7sw\" (UniqueName: \"kubernetes.io/projected/2d1685f0-5d42-4b88-9804-7f77e5a30cff-kube-api-access-cv7sw\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.464784 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.464794 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d1685f0-5d42-4b88-9804-7f77e5a30cff-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.840998 4756 generic.go:334] "Generic (PLEG): container finished" podID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerID="372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636" exitCode=0 Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.841121 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" event={"ID":"2d1685f0-5d42-4b88-9804-7f77e5a30cff","Type":"ContainerDied","Data":"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636"} Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.841153 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" event={"ID":"2d1685f0-5d42-4b88-9804-7f77e5a30cff","Type":"ContainerDied","Data":"3c0d42b34017fe1f0513c795b29443a60da2f0365849e38cd2a4b65e53e515ab"} Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.841173 4756 scope.go:117] "RemoveContainer" containerID="372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.841307 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-px82p" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.844495 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"be947271-1bee-445b-be89-945f5ffe659b","Type":"ContainerStarted","Data":"b28d5a4d2dfc2746aceb86e0ec00703acb032cce2bbad4305aed2d0ca672663b"} Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.875025 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.875001547 podStartE2EDuration="8.875001547s" podCreationTimestamp="2025-09-30 20:48:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:15.873415716 +0000 UTC m=+4625.494349213" watchObservedRunningTime="2025-09-30 20:48:15.875001547 +0000 UTC m=+4625.495935024" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.895722 4756 scope.go:117] "RemoveContainer" containerID="0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.905260 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.910368 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-px82p"] Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.933934 4756 scope.go:117] "RemoveContainer" containerID="372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636" Sep 30 20:48:15 crc kubenswrapper[4756]: E0930 20:48:15.934458 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636\": container with ID starting with 372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636 not found: ID does not exist" containerID="372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.934491 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636"} err="failed to get container status \"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636\": rpc error: code = NotFound desc = could not find container \"372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636\": container with ID starting with 372e2d649edc408b3f84fc271e17aa81e1dcd48d00b96660223e9111e44ab636 not found: ID does not exist" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.934515 4756 scope.go:117] "RemoveContainer" containerID="0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7" Sep 30 20:48:15 crc kubenswrapper[4756]: E0930 20:48:15.934970 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7\": container with ID starting with 0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7 not found: ID does not exist" containerID="0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7" Sep 30 20:48:15 crc kubenswrapper[4756]: I0930 20:48:15.935095 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7"} err="failed to get container status \"0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7\": rpc error: code = NotFound desc = could not find container \"0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7\": container with ID starting with 0e7c51a61200455ad431b02d4050add67d08afceeca1e0c925c112b0de658db7 not found: ID does not exist" Sep 30 20:48:17 crc kubenswrapper[4756]: I0930 20:48:17.129418 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" path="/var/lib/kubelet/pods/2d1685f0-5d42-4b88-9804-7f77e5a30cff/volumes" Sep 30 20:48:17 crc kubenswrapper[4756]: I0930 20:48:17.482341 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 20:48:17 crc kubenswrapper[4756]: I0930 20:48:17.730050 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 20:48:17 crc kubenswrapper[4756]: I0930 20:48:17.730615 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 20:48:19 crc kubenswrapper[4756]: I0930 20:48:19.137064 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:19 crc kubenswrapper[4756]: I0930 20:48:19.137114 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:19 crc kubenswrapper[4756]: I0930 20:48:19.799337 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 20:48:19 crc kubenswrapper[4756]: I0930 20:48:19.843904 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 20:48:21 crc kubenswrapper[4756]: I0930 20:48:21.258905 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:21 crc kubenswrapper[4756]: I0930 20:48:21.322804 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 20:48:41 crc kubenswrapper[4756]: I0930 20:48:41.040183 4756 generic.go:334] "Generic (PLEG): container finished" podID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerID="dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658" exitCode=0 Sep 30 20:48:41 crc kubenswrapper[4756]: I0930 20:48:41.042160 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerDied","Data":"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658"} Sep 30 20:48:41 crc kubenswrapper[4756]: I0930 20:48:41.045826 4756 generic.go:334] "Generic (PLEG): container finished" podID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerID="87f9f533f36ac9b2fb4f15f3577e57d6287652abf8ce2563784951e51c58af65" exitCode=0 Sep 30 20:48:41 crc kubenswrapper[4756]: I0930 20:48:41.045867 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerDied","Data":"87f9f533f36ac9b2fb4f15f3577e57d6287652abf8ce2563784951e51c58af65"} Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.064052 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerStarted","Data":"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5"} Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.065919 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.068779 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerStarted","Data":"4058ffc740cd540ac058c8773ba6a47ad273b94a1bde02e7bd6aaf4432ce89ef"} Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.069283 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.104833 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.104798734 podStartE2EDuration="38.104798734s" podCreationTimestamp="2025-09-30 20:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:42.09695566 +0000 UTC m=+4651.717889137" watchObservedRunningTime="2025-09-30 20:48:42.104798734 +0000 UTC m=+4651.725732201" Sep 30 20:48:42 crc kubenswrapper[4756]: I0930 20:48:42.145314 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.14527347 podStartE2EDuration="38.14527347s" podCreationTimestamp="2025-09-30 20:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:48:42.142066837 +0000 UTC m=+4651.763000324" watchObservedRunningTime="2025-09-30 20:48:42.14527347 +0000 UTC m=+4651.766206967" Sep 30 20:48:55 crc kubenswrapper[4756]: I0930 20:48:55.785369 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 20:48:56 crc kubenswrapper[4756]: I0930 20:48:56.136380 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.447417 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:49:01 crc kubenswrapper[4756]: E0930 20:49:01.448516 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="dnsmasq-dns" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.448540 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="dnsmasq-dns" Sep 30 20:49:01 crc kubenswrapper[4756]: E0930 20:49:01.448575 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="init" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.448584 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="init" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.449989 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d1685f0-5d42-4b88-9804-7f77e5a30cff" containerName="dnsmasq-dns" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.451658 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.458361 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.500209 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.500375 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp6lt\" (UniqueName: \"kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.500450 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.603462 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.603812 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp6lt\" (UniqueName: \"kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.603857 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.604016 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.605020 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.626220 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp6lt\" (UniqueName: 
\"kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt\") pod \"dnsmasq-dns-5b7946d7b9-psch6\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:01 crc kubenswrapper[4756]: I0930 20:49:01.774267 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:02 crc kubenswrapper[4756]: I0930 20:49:02.293198 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:02 crc kubenswrapper[4756]: I0930 20:49:02.374875 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:49:02 crc kubenswrapper[4756]: I0930 20:49:02.905880 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:03 crc kubenswrapper[4756]: I0930 20:49:03.299293 4756 generic.go:334] "Generic (PLEG): container finished" podID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerID="3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f" exitCode=0 Sep 30 20:49:03 crc kubenswrapper[4756]: I0930 20:49:03.299357 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" event={"ID":"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41","Type":"ContainerDied","Data":"3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f"} Sep 30 20:49:03 crc kubenswrapper[4756]: I0930 20:49:03.299416 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" event={"ID":"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41","Type":"ContainerStarted","Data":"9f76215580ad3be220010ae0b113909dcb358393f223421db8d4d3e77391a58e"} Sep 30 20:49:04 crc kubenswrapper[4756]: I0930 20:49:04.310078 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" event={"ID":"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41","Type":"ContainerStarted","Data":"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1"} Sep 30 20:49:04 crc kubenswrapper[4756]: I0930 20:49:04.312709 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:04 crc kubenswrapper[4756]: I0930 20:49:04.338061 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" podStartSLOduration=3.338030184 podStartE2EDuration="3.338030184s" podCreationTimestamp="2025-09-30 20:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:49:04.33481401 +0000 UTC m=+4673.955747487" watchObservedRunningTime="2025-09-30 20:49:04.338030184 +0000 UTC m=+4673.958963681" Sep 30 20:49:04 crc kubenswrapper[4756]: I0930 20:49:04.358925 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="rabbitmq" containerID="cri-o://df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5" gracePeriod=604798 Sep 30 20:49:04 crc kubenswrapper[4756]: I0930 20:49:04.915681 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="rabbitmq" containerID="cri-o://4058ffc740cd540ac058c8773ba6a47ad273b94a1bde02e7bd6aaf4432ce89ef" gracePeriod=604798 Sep 30 20:49:05 crc 
kubenswrapper[4756]: I0930 20:49:05.783100 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5672: connect: connection refused" Sep 30 20:49:06 crc kubenswrapper[4756]: I0930 20:49:06.133196 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.241:5672: connect: connection refused" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.218849 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406079 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406164 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406316 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406359 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406446 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406525 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm44g\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406603 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406623 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf\") pod 
\"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.406696 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie\") pod \"65817176-3f65-4836-b4e9-1e92d6c077ea\" (UID: \"65817176-3f65-4836-b4e9-1e92d6c077ea\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.408418 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.408823 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.410735 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.420913 4756 generic.go:334] "Generic (PLEG): container finished" podID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerID="4058ffc740cd540ac058c8773ba6a47ad273b94a1bde02e7bd6aaf4432ce89ef" exitCode=0 Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.420993 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerDied","Data":"4058ffc740cd540ac058c8773ba6a47ad273b94a1bde02e7bd6aaf4432ce89ef"} Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.422669 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424811 4756 generic.go:334] "Generic (PLEG): container finished" podID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerID="df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5" exitCode=0 Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424873 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerDied","Data":"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5"} Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424916 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"65817176-3f65-4836-b4e9-1e92d6c077ea","Type":"ContainerDied","Data":"5c58a315a662fbae14da76647f46cbb38f9f1a55d89bf147949464d58216b893"} Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424938 4756 scope.go:117] "RemoveContainer" containerID="df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424877 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info" (OuterVolumeSpecName: "pod-info") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.424989 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.428131 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846" (OuterVolumeSpecName: "persistence") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "pvc-664ad2c2-f310-4f5f-ac16-a701450ed846". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.429686 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g" (OuterVolumeSpecName: "kube-api-access-dm44g") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "kube-api-access-dm44g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.431478 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf" (OuterVolumeSpecName: "server-conf") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.471753 4756 scope.go:117] "RemoveContainer" containerID="dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.495772 4756 scope.go:117] "RemoveContainer" containerID="df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5" Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.497040 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5\": container with ID starting with df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5 not found: ID does not exist" containerID="df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.497085 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5"} err="failed to get container status \"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5\": rpc error: code = NotFound desc = could not find container \"df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5\": container with ID starting with df79b8c36ddd777798e6f97fda5c80edb54558099469c5d6262e73d4102914e5 not found: ID does not exist" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.497128 4756 scope.go:117] "RemoveContainer" containerID="dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658" Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.497604 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658\": container with ID starting with dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658 not found: ID does not exist" containerID="dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.497655 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658"} err="failed to get container status \"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658\": rpc error: code = NotFound desc = could not find container \"dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658\": container with ID starting with dee5098e7385d48f3943eff23a19ac971c3375746ee4c55c1bd15e5747709658 not found: ID does not exist" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.503232 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "65817176-3f65-4836-b4e9-1e92d6c077ea" (UID: "65817176-3f65-4836-b4e9-1e92d6c077ea"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.508946 4756 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/65817176-3f65-4836-b4e9-1e92d6c077ea-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509121 4756 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509210 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509271 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509336 4756 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/65817176-3f65-4836-b4e9-1e92d6c077ea-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509496 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") on node \"crc\" " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509567 4756 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/65817176-3f65-4836-b4e9-1e92d6c077ea-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509624 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/65817176-3f65-4836-b4e9-1e92d6c077ea-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.509689 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm44g\" (UniqueName: \"kubernetes.io/projected/65817176-3f65-4836-b4e9-1e92d6c077ea-kube-api-access-dm44g\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.518027 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.538157 4756 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.538364 4756 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-664ad2c2-f310-4f5f-ac16-a701450ed846" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846") on node "crc" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.611846 4756 reconciler_common.go:293] "Volume detached for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713273 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713329 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713381 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713531 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713587 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713610 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m94w\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713658 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713770 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713854 4756 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie\") pod \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\" (UID: \"e2fc60bc-0149-497d-85cf-d4614f8b33ad\") " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.713991 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.714252 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.714342 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.714646 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.717841 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.722450 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w" (OuterVolumeSpecName: "kube-api-access-9m94w") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "kube-api-access-9m94w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.723274 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info" (OuterVolumeSpecName: "pod-info") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.745174 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf" (OuterVolumeSpecName: "server-conf") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.753564 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865" (OuterVolumeSpecName: "persistence") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.775598 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825423 4756 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825485 4756 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2fc60bc-0149-497d-85cf-d4614f8b33ad-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825503 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m94w\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-kube-api-access-9m94w\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825551 4756 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") on node \"crc\" " Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825572 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825589 4756 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2fc60bc-0149-497d-85cf-d4614f8b33ad-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.825605 4756 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2fc60bc-0149-497d-85cf-d4614f8b33ad-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.828679 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e2fc60bc-0149-497d-85cf-d4614f8b33ad" (UID: "e2fc60bc-0149-497d-85cf-d4614f8b33ad"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.864557 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.864923 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="dnsmasq-dns" containerID="cri-o://5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f" gracePeriod=10 Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.870806 4756 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.871002 4756 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865") on node "crc" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.900558 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.936880 4756 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2fc60bc-0149-497d-85cf-d4614f8b33ad-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.936920 4756 reconciler_common.go:293] "Volume detached for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.941372 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.955415 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.957427 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="setup-container" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.963308 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="setup-container" Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.963409 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.963421 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.963445 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="setup-container" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.963456 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="setup-container" Sep 30 20:49:11 crc kubenswrapper[4756]: E0930 20:49:11.963481 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.963488 4756 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.964651 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.964722 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" containerName="rabbitmq" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.965725 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.965901 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.969950 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.970048 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lvsbp" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.970797 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.971094 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 20:49:11 crc kubenswrapper[4756]: I0930 20:49:11.973905 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140378 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140463 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140504 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140542 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcgm5\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-kube-api-access-mcgm5\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140602 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140637 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140682 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140727 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.140757 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.245946 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246060 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcgm5\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-kube-api-access-mcgm5\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246160 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246204 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246234 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " 
pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246265 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246285 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246443 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.246472 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.247563 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.247572 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.250185 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.251864 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.261952 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.277612 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.277821 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.281974 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.282020 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/13580d31e89d9734fbf1a64fe8163230ccfdaa4e77b367d3301304b5672e812d/globalmount\"" pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.282233 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcgm5\" (UniqueName: \"kubernetes.io/projected/18633e6c-8de8-42d7-b97f-cefca7bcb2b5-kube-api-access-mcgm5\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.374322 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.410376 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-664ad2c2-f310-4f5f-ac16-a701450ed846\") pod \"rabbitmq-server-0\" (UID: \"18633e6c-8de8-42d7-b97f-cefca7bcb2b5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.441811 4756 generic.go:334] "Generic (PLEG): container finished" podID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerID="5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f" exitCode=0 Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.441902 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" event={"ID":"09fbe05a-857d-48dc-9e31-faadf828e6eb","Type":"ContainerDied","Data":"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f"} Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.441927 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.441945 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-4nbjr" event={"ID":"09fbe05a-857d-48dc-9e31-faadf828e6eb","Type":"ContainerDied","Data":"18438bd35507e891fe08d754938be4f2dec263c786d2308a97f3774d85a3981a"} Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.441967 4756 scope.go:117] "RemoveContainer" containerID="5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.451973 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e2fc60bc-0149-497d-85cf-d4614f8b33ad","Type":"ContainerDied","Data":"1168e3a048bf7752b5bc4b5ee1c573f865dd0048320ecb7237fedab8b1bc5da1"} Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.452069 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.470228 4756 scope.go:117] "RemoveContainer" containerID="0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.497652 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.502557 4756 scope.go:117] "RemoveContainer" containerID="5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.504892 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:12 crc kubenswrapper[4756]: E0930 20:49:12.506173 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f\": container with ID starting with 5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f not found: ID does not exist" containerID="5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.506237 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f"} err="failed to get container status \"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f\": rpc error: code = NotFound desc = could not find container \"5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f\": container with ID starting with 5cc2d29824357c6f4347767987c10ce8a60746ee280b1be5d7b8b7453ed97e4f not found: ID does not exist" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.506274 4756 scope.go:117] "RemoveContainer" containerID="0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1" Sep 30 20:49:12 crc kubenswrapper[4756]: E0930 20:49:12.519987 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1\": container with ID starting with 0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1 not found: ID does not exist" containerID="0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.520051 4756 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1"} err="failed to get container status \"0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1\": rpc error: code = NotFound desc = could not find container \"0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1\": container with ID starting with 0fca3a6fa411bf7a8fc4f3e0fbb0352005fc65fc992acc99c3e4d25fc8d045d1 not found: ID does not exist" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.520088 4756 scope.go:117] "RemoveContainer" containerID="4058ffc740cd540ac058c8773ba6a47ad273b94a1bde02e7bd6aaf4432ce89ef" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.530583 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:12 crc kubenswrapper[4756]: E0930 20:49:12.531194 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="init" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.531225 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="init" Sep 30 20:49:12 crc kubenswrapper[4756]: E0930 20:49:12.531261 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="dnsmasq-dns" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.531270 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="dnsmasq-dns" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.531514 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" containerName="dnsmasq-dns" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.532884 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.536144 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.536462 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-g6qr8" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.536579 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.536746 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.537106 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.545213 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.551519 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc\") pod \"09fbe05a-857d-48dc-9e31-faadf828e6eb\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.551838 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config\") pod \"09fbe05a-857d-48dc-9e31-faadf828e6eb\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.551903 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvj4x\" (UniqueName: \"kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x\") pod \"09fbe05a-857d-48dc-9e31-faadf828e6eb\" (UID: \"09fbe05a-857d-48dc-9e31-faadf828e6eb\") " Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.556412 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x" (OuterVolumeSpecName: "kube-api-access-nvj4x") pod "09fbe05a-857d-48dc-9e31-faadf828e6eb" (UID: "09fbe05a-857d-48dc-9e31-faadf828e6eb"). InnerVolumeSpecName "kube-api-access-nvj4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.556625 4756 scope.go:117] "RemoveContainer" containerID="87f9f533f36ac9b2fb4f15f3577e57d6287652abf8ce2563784951e51c58af65" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.599070 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.600029 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "09fbe05a-857d-48dc-9e31-faadf828e6eb" (UID: "09fbe05a-857d-48dc-9e31-faadf828e6eb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.602383 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config" (OuterVolumeSpecName: "config") pod "09fbe05a-857d-48dc-9e31-faadf828e6eb" (UID: "09fbe05a-857d-48dc-9e31-faadf828e6eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655011 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655230 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655282 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35150b99-e380-48a6-beae-db98734c02e2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655547 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655694 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655768 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35150b99-e380-48a6-beae-db98734c02e2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655867 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xr2p\" (UniqueName: \"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-kube-api-access-6xr2p\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655929 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.655959 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.656094 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.656112 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fbe05a-857d-48dc-9e31-faadf828e6eb-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.656147 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvj4x\" (UniqueName: \"kubernetes.io/projected/09fbe05a-857d-48dc-9e31-faadf828e6eb-kube-api-access-nvj4x\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.757508 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.757907 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.757964 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.757992 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.758027 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35150b99-e380-48a6-beae-db98734c02e2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.758066 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.758100 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.758131 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35150b99-e380-48a6-beae-db98734c02e2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.758159 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xr2p\" (UniqueName: \"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-kube-api-access-6xr2p\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.759075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.759405 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.760326 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.761164 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35150b99-e380-48a6-beae-db98734c02e2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.766611 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.766702 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35150b99-e380-48a6-beae-db98734c02e2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.768120 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.768202 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/129c38121bc8e03a953681f31281646cec7fd324f44e599ccb7820f5cbe7b9f0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.772312 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35150b99-e380-48a6-beae-db98734c02e2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:12 crc kubenswrapper[4756]: I0930 20:49:12.775233 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xr2p\" (UniqueName: \"kubernetes.io/projected/35150b99-e380-48a6-beae-db98734c02e2-kube-api-access-6xr2p\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:12.809803 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c20fde3d-b7e2-43bc-9121-3b4b06cf4865\") pod \"rabbitmq-cell1-server-0\" (UID: \"35150b99-e380-48a6-beae-db98734c02e2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:12.859571 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:12.865520 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:12.872973 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-4nbjr"] Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:13.136123 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09fbe05a-857d-48dc-9e31-faadf828e6eb" path="/var/lib/kubelet/pods/09fbe05a-857d-48dc-9e31-faadf828e6eb/volumes" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:13.137449 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65817176-3f65-4836-b4e9-1e92d6c077ea" path="/var/lib/kubelet/pods/65817176-3f65-4836-b4e9-1e92d6c077ea/volumes" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:13.139215 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2fc60bc-0149-497d-85cf-d4614f8b33ad" path="/var/lib/kubelet/pods/e2fc60bc-0149-497d-85cf-d4614f8b33ad/volumes" Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:13.143721 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:49:13 crc kubenswrapper[4756]: W0930 20:49:13.145224 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18633e6c_8de8_42d7_b97f_cefca7bcb2b5.slice/crio-af6050a559d1c59f4aa3c708f9dd1e8571160aa113231e36d456e4119f69de6f WatchSource:0}: Error finding container af6050a559d1c59f4aa3c708f9dd1e8571160aa113231e36d456e4119f69de6f: Status 404 returned error can't find the container with id af6050a559d1c59f4aa3c708f9dd1e8571160aa113231e36d456e4119f69de6f Sep 30 20:49:13 crc kubenswrapper[4756]: I0930 20:49:13.468008 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"18633e6c-8de8-42d7-b97f-cefca7bcb2b5","Type":"ContainerStarted","Data":"af6050a559d1c59f4aa3c708f9dd1e8571160aa113231e36d456e4119f69de6f"} Sep 30 20:49:14 crc kubenswrapper[4756]: I0930 20:49:14.000055 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:49:14 crc kubenswrapper[4756]: I0930 20:49:14.483616 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35150b99-e380-48a6-beae-db98734c02e2","Type":"ContainerStarted","Data":"fb7b734f87175913fe1027668eea893058d07a121e550adb3027be28de47fc3a"} Sep 30 20:49:15 crc kubenswrapper[4756]: I0930 20:49:15.497372 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"18633e6c-8de8-42d7-b97f-cefca7bcb2b5","Type":"ContainerStarted","Data":"4b0aed6f7b4523b5193342318492feacb7f5ee4d5b46a399b8050921c4053d82"} Sep 30 20:49:16 crc kubenswrapper[4756]: I0930 20:49:16.512212 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35150b99-e380-48a6-beae-db98734c02e2","Type":"ContainerStarted","Data":"3fce9af5810040747bca828a94a349539db8fb5f388d01c7902f52ef99bb98f8"} Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.523525 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.528285 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.561027 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.640986 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.641217 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.641502 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm77x\" (UniqueName: \"kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.743319 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.743418 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.743481 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm77x\" (UniqueName: \"kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.744047 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.744075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.772080 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zm77x\" (UniqueName: \"kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x\") pod \"community-operators-htt9z\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:21 crc kubenswrapper[4756]: I0930 20:49:21.854734 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:22 crc kubenswrapper[4756]: I0930 20:49:22.405891 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:22 crc kubenswrapper[4756]: W0930 20:49:22.656722 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbeaf049_6ae8_46cb_898c_f86a9255f901.slice/crio-1af0e121f17bdb4c825398bec3f428302eba557daa2972560a067de8fc3e118a WatchSource:0}: Error finding container 1af0e121f17bdb4c825398bec3f428302eba557daa2972560a067de8fc3e118a: Status 404 returned error can't find the container with id 1af0e121f17bdb4c825398bec3f428302eba557daa2972560a067de8fc3e118a Sep 30 20:49:23 crc kubenswrapper[4756]: I0930 20:49:23.590735 4756 generic.go:334] "Generic (PLEG): container finished" podID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerID="0e658710fc575984def7235229dd61ab540832d0be523d0998df61911996873c" exitCode=0 Sep 30 20:49:23 crc kubenswrapper[4756]: I0930 20:49:23.590818 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerDied","Data":"0e658710fc575984def7235229dd61ab540832d0be523d0998df61911996873c"} Sep 30 20:49:23 crc kubenswrapper[4756]: I0930 20:49:23.591658 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerStarted","Data":"1af0e121f17bdb4c825398bec3f428302eba557daa2972560a067de8fc3e118a"} Sep 30 20:49:24 crc kubenswrapper[4756]: I0930 20:49:24.602535 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerStarted","Data":"0b4e4fdb50cc2957f6905ae4e8f6b72758087ab1c0db629bedfa998b5b4936dc"} Sep 30 20:49:25 crc kubenswrapper[4756]: I0930 20:49:25.616293 4756 generic.go:334] "Generic (PLEG): container finished" podID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerID="0b4e4fdb50cc2957f6905ae4e8f6b72758087ab1c0db629bedfa998b5b4936dc" exitCode=0 Sep 30 20:49:25 crc kubenswrapper[4756]: I0930 20:49:25.616361 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerDied","Data":"0b4e4fdb50cc2957f6905ae4e8f6b72758087ab1c0db629bedfa998b5b4936dc"} Sep 30 20:49:26 crc kubenswrapper[4756]: I0930 20:49:26.629541 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerStarted","Data":"ef24cacca7e5ef933ac83d0faee1195f07b3f5c0898dd4504e9c00cc9af4cf96"} Sep 30 20:49:26 crc kubenswrapper[4756]: I0930 20:49:26.667452 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-htt9z" 
podStartSLOduration=2.9139939630000002 podStartE2EDuration="5.66739028s" podCreationTimestamp="2025-09-30 20:49:21 +0000 UTC" firstStartedPulling="2025-09-30 20:49:23.595643688 +0000 UTC m=+4693.216577165" lastFinishedPulling="2025-09-30 20:49:26.349039995 +0000 UTC m=+4695.969973482" observedRunningTime="2025-09-30 20:49:26.653007124 +0000 UTC m=+4696.273940631" watchObservedRunningTime="2025-09-30 20:49:26.66739028 +0000 UTC m=+4696.288323837" Sep 30 20:49:31 crc kubenswrapper[4756]: I0930 20:49:31.856052 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:31 crc kubenswrapper[4756]: I0930 20:49:31.857007 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:31 crc kubenswrapper[4756]: I0930 20:49:31.936391 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:32 crc kubenswrapper[4756]: I0930 20:49:32.755871 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:32 crc kubenswrapper[4756]: I0930 20:49:32.813071 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:34 crc kubenswrapper[4756]: I0930 20:49:34.725883 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-htt9z" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="registry-server" containerID="cri-o://ef24cacca7e5ef933ac83d0faee1195f07b3f5c0898dd4504e9c00cc9af4cf96" gracePeriod=2 Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.371269 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.375769 4756 util.go:30] "No sandbox for pod can be found. 
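Annotation: the pod_startup_latency_tracker line for community-operators-htt9z is internally consistent with podStartE2EDuration = watchObservedRunningTime - podCreationTimestamp (26.66739028 - 21 = 5.66739028s) and podStartSLOduration = E2E minus image-pull time taken from the monotonic m=+ readings (4695.969973482 - 4693.216577165 = 2.753396317s). A check of that arithmetic, derived from this log rather than from kubelet source:

```go
package main

import (
	"fmt"
	"time"
)

// Reproduces the community-operators-htt9z numbers above:
//   E2E = watchObservedRunningTime - podCreationTimestamp
//   SLO = E2E - (lastFinishedPulling - firstStartedPulling)
// Creation timestamps are whole seconds; pulls use monotonic readings.
func main() {
	e2e := 5667390280 * time.Nanosecond // 5.66739028s from the log
	firstPull := 4693.216577165         // m=+ monotonic seconds
	lastPull := 4695.969973482
	pull := time.Duration((lastPull - firstPull) * float64(time.Second))
	slo := e2e - pull
	fmt.Println(slo.Seconds()) // ~2.913993963, matching podStartSLOduration
}
```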
Need to start a new one" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.406669 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.511960 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.512032 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj2cs\" (UniqueName: \"kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.512256 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.613787 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.613859 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.613891 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj2cs\" (UniqueName: \"kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.614497 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.614510 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.738231 4756 generic.go:334] "Generic 
(PLEG): container finished" podID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerID="ef24cacca7e5ef933ac83d0faee1195f07b3f5c0898dd4504e9c00cc9af4cf96" exitCode=0 Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.738263 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerDied","Data":"ef24cacca7e5ef933ac83d0faee1195f07b3f5c0898dd4504e9c00cc9af4cf96"} Sep 30 20:49:35 crc kubenswrapper[4756]: I0930 20:49:35.968601 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj2cs\" (UniqueName: \"kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs\") pod \"certified-operators-dq5t2\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.019938 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.129539 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.235462 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content\") pod \"fbeaf049-6ae8-46cb-898c-f86a9255f901\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.235527 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities\") pod \"fbeaf049-6ae8-46cb-898c-f86a9255f901\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.235624 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm77x\" (UniqueName: \"kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x\") pod \"fbeaf049-6ae8-46cb-898c-f86a9255f901\" (UID: \"fbeaf049-6ae8-46cb-898c-f86a9255f901\") " Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.239333 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities" (OuterVolumeSpecName: "utilities") pod "fbeaf049-6ae8-46cb-898c-f86a9255f901" (UID: "fbeaf049-6ae8-46cb-898c-f86a9255f901"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.246323 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x" (OuterVolumeSpecName: "kube-api-access-zm77x") pod "fbeaf049-6ae8-46cb-898c-f86a9255f901" (UID: "fbeaf049-6ae8-46cb-898c-f86a9255f901"). InnerVolumeSpecName "kube-api-access-zm77x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.302996 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbeaf049-6ae8-46cb-898c-f86a9255f901" (UID: "fbeaf049-6ae8-46cb-898c-f86a9255f901"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.328371 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.353808 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.353837 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbeaf049-6ae8-46cb-898c-f86a9255f901-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.353847 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm77x\" (UniqueName: \"kubernetes.io/projected/fbeaf049-6ae8-46cb-898c-f86a9255f901-kube-api-access-zm77x\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.753932 4756 generic.go:334] "Generic (PLEG): container finished" podID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerID="050f8ca9d0ba6cc3a7a61f984df146e769f67d6deb835e16aa201fd4a679fa7a" exitCode=0 Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.754648 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerDied","Data":"050f8ca9d0ba6cc3a7a61f984df146e769f67d6deb835e16aa201fd4a679fa7a"} Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.754703 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerStarted","Data":"bb13fe58ba5d7f2e1f50c9731fdafdabe25b1ed6da9f1d327893af0ad2ec2136"} Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.765155 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htt9z" event={"ID":"fbeaf049-6ae8-46cb-898c-f86a9255f901","Type":"ContainerDied","Data":"1af0e121f17bdb4c825398bec3f428302eba557daa2972560a067de8fc3e118a"} Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.765292 4756 scope.go:117] "RemoveContainer" containerID="ef24cacca7e5ef933ac83d0faee1195f07b3f5c0898dd4504e9c00cc9af4cf96" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.765303 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-htt9z" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.808062 4756 scope.go:117] "RemoveContainer" containerID="0b4e4fdb50cc2957f6905ae4e8f6b72758087ab1c0db629bedfa998b5b4936dc" Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.827765 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.833963 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-htt9z"] Sep 30 20:49:36 crc kubenswrapper[4756]: I0930 20:49:36.852497 4756 scope.go:117] "RemoveContainer" containerID="0e658710fc575984def7235229dd61ab540832d0be523d0998df61911996873c" Sep 30 20:49:37 crc kubenswrapper[4756]: I0930 20:49:37.139212 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" path="/var/lib/kubelet/pods/fbeaf049-6ae8-46cb-898c-f86a9255f901/volumes" Sep 30 20:49:38 crc kubenswrapper[4756]: I0930 20:49:38.793216 4756 generic.go:334] "Generic (PLEG): container finished" podID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerID="def6d7e3304c35826038315716501f873e1ee35cdf10cc9de96a58d5986b7d0f" exitCode=0 Sep 30 20:49:38 crc kubenswrapper[4756]: I0930 20:49:38.793797 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerDied","Data":"def6d7e3304c35826038315716501f873e1ee35cdf10cc9de96a58d5986b7d0f"} Sep 30 20:49:39 crc kubenswrapper[4756]: I0930 20:49:39.809189 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerStarted","Data":"ac99849f61c5d6d879172b4f9eab4fb9bb7ba3ad112ca697397b616b34c984b0"} Sep 30 20:49:39 crc kubenswrapper[4756]: I0930 20:49:39.830965 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dq5t2" podStartSLOduration=2.261897298 podStartE2EDuration="4.830935077s" podCreationTimestamp="2025-09-30 20:49:35 +0000 UTC" firstStartedPulling="2025-09-30 20:49:36.75674292 +0000 UTC m=+4706.377676437" lastFinishedPulling="2025-09-30 20:49:39.325780699 +0000 UTC m=+4708.946714216" observedRunningTime="2025-09-30 20:49:39.828172455 +0000 UTC m=+4709.449105932" watchObservedRunningTime="2025-09-30 20:49:39.830935077 +0000 UTC m=+4709.451868584" Sep 30 20:49:46 crc kubenswrapper[4756]: I0930 20:49:46.021301 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:46 crc kubenswrapper[4756]: I0930 20:49:46.023478 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:46 crc kubenswrapper[4756]: I0930 20:49:46.078492 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:46 crc kubenswrapper[4756]: I0930 20:49:46.980218 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:47 crc kubenswrapper[4756]: I0930 20:49:47.057079 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:48 crc kubenswrapper[4756]: I0930 
20:49:48.918293 4756 generic.go:334] "Generic (PLEG): container finished" podID="18633e6c-8de8-42d7-b97f-cefca7bcb2b5" containerID="4b0aed6f7b4523b5193342318492feacb7f5ee4d5b46a399b8050921c4053d82" exitCode=0 Sep 30 20:49:48 crc kubenswrapper[4756]: I0930 20:49:48.918426 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"18633e6c-8de8-42d7-b97f-cefca7bcb2b5","Type":"ContainerDied","Data":"4b0aed6f7b4523b5193342318492feacb7f5ee4d5b46a399b8050921c4053d82"} Sep 30 20:49:48 crc kubenswrapper[4756]: I0930 20:49:48.919310 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dq5t2" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="registry-server" containerID="cri-o://ac99849f61c5d6d879172b4f9eab4fb9bb7ba3ad112ca697397b616b34c984b0" gracePeriod=2 Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.929587 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"18633e6c-8de8-42d7-b97f-cefca7bcb2b5","Type":"ContainerStarted","Data":"3ea3bd66ee9ce552f6160721dc92aceecb7fa9b787feef73c1c6c9534cf951c6"} Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.930678 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.932314 4756 generic.go:334] "Generic (PLEG): container finished" podID="35150b99-e380-48a6-beae-db98734c02e2" containerID="3fce9af5810040747bca828a94a349539db8fb5f388d01c7902f52ef99bb98f8" exitCode=0 Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.932371 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35150b99-e380-48a6-beae-db98734c02e2","Type":"ContainerDied","Data":"3fce9af5810040747bca828a94a349539db8fb5f388d01c7902f52ef99bb98f8"} Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.935617 4756 generic.go:334] "Generic (PLEG): container finished" podID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerID="ac99849f61c5d6d879172b4f9eab4fb9bb7ba3ad112ca697397b616b34c984b0" exitCode=0 Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.935660 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerDied","Data":"ac99849f61c5d6d879172b4f9eab4fb9bb7ba3ad112ca697397b616b34c984b0"} Sep 30 20:49:49 crc kubenswrapper[4756]: I0930 20:49:49.970757 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.970722304 podStartE2EDuration="38.970722304s" podCreationTimestamp="2025-09-30 20:49:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:49:49.96021762 +0000 UTC m=+4719.581151127" watchObservedRunningTime="2025-09-30 20:49:49.970722304 +0000 UTC m=+4719.591655791" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.557729 4756 util.go:48] "No ready sandbox for pod can be found. 
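Annotation: the rabbitmq-server-0 latency line above reports firstStartedPulling and lastFinishedPulling as "0001-01-01 00:00:00 +0000 UTC", which is Go's time.Time zero value; here it marks that no image pull happened, so E2E and SLO durations coincide (both ~38.97s). A small sketch of that sentinel check:

```go
package main

import (
	"fmt"
	"time"
)

// pullDuration returns 0 when the pull timestamps are the Go zero value
// ("0001-01-01 00:00:00 +0000 UTC" in the log), i.e. the image was
// already present and pulling contributed nothing to startup latency.
func pullDuration(firstStarted, lastFinished time.Time) time.Duration {
	if firstStarted.IsZero() || lastFinished.IsZero() {
		return 0
	}
	return lastFinished.Sub(firstStarted)
}

func main() {
	var zero time.Time
	fmt.Println(pullDuration(zero, zero)) // 0s -> SLO == E2E, as for rabbitmq-server-0
}
```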
Need to start a new one" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.644643 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities\") pod \"da4f1758-85ca-4821-944f-195d7a0a8bf9\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.644886 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj2cs\" (UniqueName: \"kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs\") pod \"da4f1758-85ca-4821-944f-195d7a0a8bf9\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.644952 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content\") pod \"da4f1758-85ca-4821-944f-195d7a0a8bf9\" (UID: \"da4f1758-85ca-4821-944f-195d7a0a8bf9\") " Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.649507 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities" (OuterVolumeSpecName: "utilities") pod "da4f1758-85ca-4821-944f-195d7a0a8bf9" (UID: "da4f1758-85ca-4821-944f-195d7a0a8bf9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.650724 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs" (OuterVolumeSpecName: "kube-api-access-dj2cs") pod "da4f1758-85ca-4821-944f-195d7a0a8bf9" (UID: "da4f1758-85ca-4821-944f-195d7a0a8bf9"). InnerVolumeSpecName "kube-api-access-dj2cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.690581 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da4f1758-85ca-4821-944f-195d7a0a8bf9" (UID: "da4f1758-85ca-4821-944f-195d7a0a8bf9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.747668 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.747720 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj2cs\" (UniqueName: \"kubernetes.io/projected/da4f1758-85ca-4821-944f-195d7a0a8bf9-kube-api-access-dj2cs\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.747743 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4f1758-85ca-4821-944f-195d7a0a8bf9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.947144 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35150b99-e380-48a6-beae-db98734c02e2","Type":"ContainerStarted","Data":"4849581aeb6dbecb33e595fab32698e364b1a5ab587523c1860c7e42bc4caa7a"} Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.948905 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.954355 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dq5t2" event={"ID":"da4f1758-85ca-4821-944f-195d7a0a8bf9","Type":"ContainerDied","Data":"bb13fe58ba5d7f2e1f50c9731fdafdabe25b1ed6da9f1d327893af0ad2ec2136"} Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.954417 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dq5t2" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.954457 4756 scope.go:117] "RemoveContainer" containerID="ac99849f61c5d6d879172b4f9eab4fb9bb7ba3ad112ca697397b616b34c984b0" Sep 30 20:49:50 crc kubenswrapper[4756]: I0930 20:49:50.983527 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.983497275 podStartE2EDuration="38.983497275s" podCreationTimestamp="2025-09-30 20:49:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:49:50.98061029 +0000 UTC m=+4720.601543807" watchObservedRunningTime="2025-09-30 20:49:50.983497275 +0000 UTC m=+4720.604430752" Sep 30 20:49:51 crc kubenswrapper[4756]: I0930 20:49:51.053535 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:51 crc kubenswrapper[4756]: I0930 20:49:51.058896 4756 scope.go:117] "RemoveContainer" containerID="def6d7e3304c35826038315716501f873e1ee35cdf10cc9de96a58d5986b7d0f" Sep 30 20:49:51 crc kubenswrapper[4756]: I0930 20:49:51.065707 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dq5t2"] Sep 30 20:49:51 crc kubenswrapper[4756]: I0930 20:49:51.100775 4756 scope.go:117] "RemoveContainer" containerID="050f8ca9d0ba6cc3a7a61f984df146e769f67d6deb835e16aa201fd4a679fa7a" Sep 30 20:49:51 crc kubenswrapper[4756]: I0930 20:49:51.127896 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" path="/var/lib/kubelet/pods/da4f1758-85ca-4821-944f-195d7a0a8bf9/volumes" Sep 30 20:50:02 crc kubenswrapper[4756]: I0930 20:50:02.602959 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 20:50:02 crc kubenswrapper[4756]: I0930 20:50:02.863544 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.967154 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968597 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="extract-content" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968625 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="extract-content" Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968652 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="extract-utilities" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968664 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="extract-utilities" Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968714 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968724 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968738 4756 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="extract-utilities" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968746 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="extract-utilities" Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968772 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968783 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: E0930 20:50:10.968795 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="extract-content" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968803 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="extract-content" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.968993 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="da4f1758-85ca-4821-944f-195d7a0a8bf9" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.969019 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbeaf049-6ae8-46cb-898c-f86a9255f901" containerName="registry-server" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.969730 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.972714 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bchvx" Sep 30 20:50:10 crc kubenswrapper[4756]: I0930 20:50:10.980784 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.033325 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4m4b\" (UniqueName: \"kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b\") pod \"mariadb-client-1-default\" (UID: \"f3bf88a2-b588-4df6-a570-cc455fe7205b\") " pod="openstack/mariadb-client-1-default" Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.134872 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4m4b\" (UniqueName: \"kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b\") pod \"mariadb-client-1-default\" (UID: \"f3bf88a2-b588-4df6-a570-cc455fe7205b\") " pod="openstack/mariadb-client-1-default" Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.161713 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4m4b\" (UniqueName: \"kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b\") pod \"mariadb-client-1-default\" (UID: \"f3bf88a2-b588-4df6-a570-cc455fe7205b\") " pod="openstack/mariadb-client-1-default" Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.299014 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bchvx" Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.307665 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 20:50:11 crc kubenswrapper[4756]: I0930 20:50:11.872825 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 20:50:12 crc kubenswrapper[4756]: I0930 20:50:12.188842 4756 generic.go:334] "Generic (PLEG): container finished" podID="f3bf88a2-b588-4df6-a570-cc455fe7205b" containerID="a6c14311921e95a815d84260255dc79b59fec6980fea05047fd971e4b564f77c" exitCode=0 Sep 30 20:50:12 crc kubenswrapper[4756]: I0930 20:50:12.189035 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"f3bf88a2-b588-4df6-a570-cc455fe7205b","Type":"ContainerDied","Data":"a6c14311921e95a815d84260255dc79b59fec6980fea05047fd971e4b564f77c"} Sep 30 20:50:12 crc kubenswrapper[4756]: I0930 20:50:12.189325 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"f3bf88a2-b588-4df6-a570-cc455fe7205b","Type":"ContainerStarted","Data":"a0de35ecaaf7d6a21c4d50b9a7e5f2ddb8032157a357cfd42e3ef5cf12c0156c"} Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.589984 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.630016 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_f3bf88a2-b588-4df6-a570-cc455fe7205b/mariadb-client-1-default/0.log" Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.661685 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.668648 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.680656 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4m4b\" (UniqueName: \"kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b\") pod \"f3bf88a2-b588-4df6-a570-cc455fe7205b\" (UID: \"f3bf88a2-b588-4df6-a570-cc455fe7205b\") " Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.688462 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b" (OuterVolumeSpecName: "kube-api-access-x4m4b") pod "f3bf88a2-b588-4df6-a570-cc455fe7205b" (UID: "f3bf88a2-b588-4df6-a570-cc455fe7205b"). InnerVolumeSpecName "kube-api-access-x4m4b". 
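The kube-api-access-x4m4b volume being mounted and torn down around this point is the service-account credential volume that Kubernetes injects into every pod by default. As a hedged illustration (the volume name is from the log; the projected sources are the standard token/CA/namespace trio, assumed here rather than printed by the kubelet), a minimal sketch of what such a volume looks like in a pod spec, expressed as a Python dict:

import json

# Sketch of the auto-injected service-account credential volume the kubelet
# is mounting/unmounting above. The name comes from the log; the projected
# sources are the standard trio Kubernetes injects (token, cluster CA bundle,
# namespace), assumed here, and 3607s is the typical token lifetime default.
kube_api_access_volume = {
    "name": "kube-api-access-x4m4b",  # from the log entries above
    "projected": {
        "sources": [
            {"serviceAccountToken": {"expirationSeconds": 3607, "path": "token"}},
            {"configMap": {"name": "kube-root-ca.crt",
                           "items": [{"key": "ca.crt", "path": "ca.crt"}]}},
            {"downwardAPI": {"items": [
                {"path": "namespace",
                 "fieldRef": {"fieldPath": "metadata.namespace"}}]}},
        ]
    },
}

if __name__ == "__main__":
    print(json.dumps(kube_api_access_volume, indent=2))

This is also why the teardown entries above report PluginName "kubernetes.io/projected": the kubelet handles the volume through the projected-volume plugin.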
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:13 crc kubenswrapper[4756]: I0930 20:50:13.782022 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4m4b\" (UniqueName: \"kubernetes.io/projected/f3bf88a2-b588-4df6-a570-cc455fe7205b-kube-api-access-x4m4b\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.148789 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 20:50:14 crc kubenswrapper[4756]: E0930 20:50:14.149379 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bf88a2-b588-4df6-a570-cc455fe7205b" containerName="mariadb-client-1-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.149419 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bf88a2-b588-4df6-a570-cc455fe7205b" containerName="mariadb-client-1-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.149606 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3bf88a2-b588-4df6-a570-cc455fe7205b" containerName="mariadb-client-1-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.150373 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.163182 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.189347 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47ds7\" (UniqueName: \"kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7\") pod \"mariadb-client-2-default\" (UID: \"4aadf84f-c642-4367-ab8e-307193fde932\") " pod="openstack/mariadb-client-2-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.210083 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0de35ecaaf7d6a21c4d50b9a7e5f2ddb8032157a357cfd42e3ef5cf12c0156c" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.210182 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.291832 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47ds7\" (UniqueName: \"kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7\") pod \"mariadb-client-2-default\" (UID: \"4aadf84f-c642-4367-ab8e-307193fde932\") " pod="openstack/mariadb-client-2-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.319592 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47ds7\" (UniqueName: \"kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7\") pod \"mariadb-client-2-default\" (UID: \"4aadf84f-c642-4367-ab8e-307193fde932\") " pod="openstack/mariadb-client-2-default" Sep 30 20:50:14 crc kubenswrapper[4756]: I0930 20:50:14.478317 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 20:50:15 crc kubenswrapper[4756]: I0930 20:50:15.084692 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 20:50:15 crc kubenswrapper[4756]: I0930 20:50:15.132414 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3bf88a2-b588-4df6-a570-cc455fe7205b" path="/var/lib/kubelet/pods/f3bf88a2-b588-4df6-a570-cc455fe7205b/volumes" Sep 30 20:50:15 crc kubenswrapper[4756]: I0930 20:50:15.221819 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"4aadf84f-c642-4367-ab8e-307193fde932","Type":"ContainerStarted","Data":"67767e117178d41671e1c77a2a6b769132e1dbc874f28b05b7be985246cf98f2"} Sep 30 20:50:16 crc kubenswrapper[4756]: I0930 20:50:16.232908 4756 generic.go:334] "Generic (PLEG): container finished" podID="4aadf84f-c642-4367-ab8e-307193fde932" containerID="4bc23a744b0c480961daa73712841719b2dc2de699734261c363aa5f8b70579c" exitCode=0 Sep 30 20:50:16 crc kubenswrapper[4756]: I0930 20:50:16.233027 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"4aadf84f-c642-4367-ab8e-307193fde932","Type":"ContainerDied","Data":"4bc23a744b0c480961daa73712841719b2dc2de699734261c363aa5f8b70579c"} Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.662946 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.713682 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_4aadf84f-c642-4367-ab8e-307193fde932/mariadb-client-2-default/0.log" Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.749445 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.755003 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47ds7\" (UniqueName: \"kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7\") pod \"4aadf84f-c642-4367-ab8e-307193fde932\" (UID: \"4aadf84f-c642-4367-ab8e-307193fde932\") " Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.761611 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.762835 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7" (OuterVolumeSpecName: "kube-api-access-47ds7") pod "4aadf84f-c642-4367-ab8e-307193fde932" (UID: "4aadf84f-c642-4367-ab8e-307193fde932"). InnerVolumeSpecName "kube-api-access-47ds7". 
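Each of these short-lived mariadb-client-* pods traces the same arc through the journal: SyncLoop ADD, PLEG ContainerStarted, ContainerDied with exitCode=0, SyncLoop DELETE/REMOVE, then volume teardown. A small hypothetical helper for pulling that arc out of a log like this one; the regexes match the kubelet formats visible above, while the pod name, input handling, and output format are illustrative only:

import re
import sys

# Match the kubelet SyncLoop and PLEG lines seen in this journal, e.g.:
#   ... "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"]
#   ... "Generic (PLEG): container finished" podID="..." containerID="..." exitCode=0
SYNCLOOP = re.compile(r'"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\[([^\]]+)\]')
FINISHED = re.compile(r'"Generic \(PLEG\): container finished" podID="([0-9a-f-]+)" '
                      r'containerID="([0-9a-f]+)" exitCode=(-?\d+)')

def lifecycle(lines, pod):
    """Yield (timestamp, event) tuples for the pod named "namespace/name"."""
    for line in lines:
        ts = line[:15]                      # journal prefix, e.g. "Sep 30 20:50:14"
        m = SYNCLOOP.search(line)
        if m and f'"{pod}"' in m.group(2):
            yield ts, f"SyncLoop {m.group(1)}"
        m = FINISHED.search(line)
        if m:                               # PLEG lines carry the pod UID, not the name
            yield ts, f"container {m.group(2)[:12]} exited {m.group(3)}"

if __name__ == "__main__":
    for ts, event in lifecycle(sys.stdin, "openstack/mariadb-client-2-default"):
        print(ts, event)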
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:17 crc kubenswrapper[4756]: I0930 20:50:17.856811 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47ds7\" (UniqueName: \"kubernetes.io/projected/4aadf84f-c642-4367-ab8e-307193fde932-kube-api-access-47ds7\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.161578 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Sep 30 20:50:18 crc kubenswrapper[4756]: E0930 20:50:18.162039 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aadf84f-c642-4367-ab8e-307193fde932" containerName="mariadb-client-2-default" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.162060 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aadf84f-c642-4367-ab8e-307193fde932" containerName="mariadb-client-2-default" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.162245 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aadf84f-c642-4367-ab8e-307193fde932" containerName="mariadb-client-2-default" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.162913 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.179969 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.258786 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67767e117178d41671e1c77a2a6b769132e1dbc874f28b05b7be985246cf98f2" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.258847 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.262324 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlqfm\" (UniqueName: \"kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm\") pod \"mariadb-client-1\" (UID: \"e39db131-2635-4691-98d1-0867ce04efd7\") " pod="openstack/mariadb-client-1" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.364338 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlqfm\" (UniqueName: \"kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm\") pod \"mariadb-client-1\" (UID: \"e39db131-2635-4691-98d1-0867ce04efd7\") " pod="openstack/mariadb-client-1" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.387087 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlqfm\" (UniqueName: \"kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm\") pod \"mariadb-client-1\" (UID: \"e39db131-2635-4691-98d1-0867ce04efd7\") " pod="openstack/mariadb-client-1" Sep 30 20:50:18 crc kubenswrapper[4756]: I0930 20:50:18.486992 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 20:50:19 crc kubenswrapper[4756]: I0930 20:50:19.073865 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 20:50:19 crc kubenswrapper[4756]: W0930 20:50:19.076895 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode39db131_2635_4691_98d1_0867ce04efd7.slice/crio-9777f2a07c6c1d83fc8ad08169cac7927a17186d0a91ccabf838d3bd4f0e1477 WatchSource:0}: Error finding container 9777f2a07c6c1d83fc8ad08169cac7927a17186d0a91ccabf838d3bd4f0e1477: Status 404 returned error can't find the container with id 9777f2a07c6c1d83fc8ad08169cac7927a17186d0a91ccabf838d3bd4f0e1477 Sep 30 20:50:19 crc kubenswrapper[4756]: I0930 20:50:19.134015 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4aadf84f-c642-4367-ab8e-307193fde932" path="/var/lib/kubelet/pods/4aadf84f-c642-4367-ab8e-307193fde932/volumes" Sep 30 20:50:19 crc kubenswrapper[4756]: I0930 20:50:19.271702 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"e39db131-2635-4691-98d1-0867ce04efd7","Type":"ContainerStarted","Data":"9777f2a07c6c1d83fc8ad08169cac7927a17186d0a91ccabf838d3bd4f0e1477"} Sep 30 20:50:20 crc kubenswrapper[4756]: I0930 20:50:20.283671 4756 generic.go:334] "Generic (PLEG): container finished" podID="e39db131-2635-4691-98d1-0867ce04efd7" containerID="7d55d5d800055e314497a1429e35586141107f9ce684ffac18f807ffa7de1231" exitCode=0 Sep 30 20:50:20 crc kubenswrapper[4756]: I0930 20:50:20.283800 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"e39db131-2635-4691-98d1-0867ce04efd7","Type":"ContainerDied","Data":"7d55d5d800055e314497a1429e35586141107f9ce684ffac18f807ffa7de1231"} Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.743637 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.764075 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_e39db131-2635-4691-98d1-0867ce04efd7/mariadb-client-1/0.log" Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.792762 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.800064 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.830209 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlqfm\" (UniqueName: \"kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm\") pod \"e39db131-2635-4691-98d1-0867ce04efd7\" (UID: \"e39db131-2635-4691-98d1-0867ce04efd7\") " Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.840695 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm" (OuterVolumeSpecName: "kube-api-access-mlqfm") pod "e39db131-2635-4691-98d1-0867ce04efd7" (UID: "e39db131-2635-4691-98d1-0867ce04efd7"). InnerVolumeSpecName "kube-api-access-mlqfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:21 crc kubenswrapper[4756]: I0930 20:50:21.933938 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlqfm\" (UniqueName: \"kubernetes.io/projected/e39db131-2635-4691-98d1-0867ce04efd7-kube-api-access-mlqfm\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.200637 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 20:50:22 crc kubenswrapper[4756]: E0930 20:50:22.201483 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e39db131-2635-4691-98d1-0867ce04efd7" containerName="mariadb-client-1" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.201522 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e39db131-2635-4691-98d1-0867ce04efd7" containerName="mariadb-client-1" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.201823 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e39db131-2635-4691-98d1-0867ce04efd7" containerName="mariadb-client-1" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.203006 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.227189 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.238644 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf9ms\" (UniqueName: \"kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms\") pod \"mariadb-client-4-default\" (UID: \"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09\") " pod="openstack/mariadb-client-4-default" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.307151 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9777f2a07c6c1d83fc8ad08169cac7927a17186d0a91ccabf838d3bd4f0e1477" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.307255 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.340639 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf9ms\" (UniqueName: \"kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms\") pod \"mariadb-client-4-default\" (UID: \"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09\") " pod="openstack/mariadb-client-4-default" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.365895 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf9ms\" (UniqueName: \"kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms\") pod \"mariadb-client-4-default\" (UID: \"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09\") " pod="openstack/mariadb-client-4-default" Sep 30 20:50:22 crc kubenswrapper[4756]: I0930 20:50:22.593801 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 20:50:23 crc kubenswrapper[4756]: I0930 20:50:23.136948 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e39db131-2635-4691-98d1-0867ce04efd7" path="/var/lib/kubelet/pods/e39db131-2635-4691-98d1-0867ce04efd7/volumes" Sep 30 20:50:23 crc kubenswrapper[4756]: I0930 20:50:23.202628 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 20:50:23 crc kubenswrapper[4756]: I0930 20:50:23.321437 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09","Type":"ContainerStarted","Data":"d6fc280a8f9a77324958b1554a5b34752633867a3417805735cccc4056180057"} Sep 30 20:50:24 crc kubenswrapper[4756]: I0930 20:50:24.337492 4756 generic.go:334] "Generic (PLEG): container finished" podID="fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" containerID="583cef01c27733e67bc68f358793355e08e5190a98d85295e4a46acad2aa47a3" exitCode=0 Sep 30 20:50:24 crc kubenswrapper[4756]: I0930 20:50:24.337656 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09","Type":"ContainerDied","Data":"583cef01c27733e67bc68f358793355e08e5190a98d85295e4a46acad2aa47a3"} Sep 30 20:50:25 crc kubenswrapper[4756]: I0930 20:50:25.812598 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 20:50:25 crc kubenswrapper[4756]: I0930 20:50:25.831919 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09/mariadb-client-4-default/0.log" Sep 30 20:50:25 crc kubenswrapper[4756]: I0930 20:50:25.863829 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 20:50:25 crc kubenswrapper[4756]: I0930 20:50:25.870163 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 20:50:26 crc kubenswrapper[4756]: I0930 20:50:26.005046 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf9ms\" (UniqueName: \"kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms\") pod \"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09\" (UID: \"fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09\") " Sep 30 20:50:26 crc kubenswrapper[4756]: I0930 20:50:26.012243 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms" (OuterVolumeSpecName: "kube-api-access-vf9ms") pod "fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" (UID: "fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09"). InnerVolumeSpecName "kube-api-access-vf9ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:26 crc kubenswrapper[4756]: I0930 20:50:26.107158 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf9ms\" (UniqueName: \"kubernetes.io/projected/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09-kube-api-access-vf9ms\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:26 crc kubenswrapper[4756]: I0930 20:50:26.368026 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6fc280a8f9a77324958b1554a5b34752633867a3417805735cccc4056180057" Sep 30 20:50:26 crc kubenswrapper[4756]: I0930 20:50:26.368089 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 20:50:27 crc kubenswrapper[4756]: I0930 20:50:27.133172 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" path="/var/lib/kubelet/pods/fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09/volumes" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.831407 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 20:50:29 crc kubenswrapper[4756]: E0930 20:50:29.832229 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" containerName="mariadb-client-4-default" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.832263 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" containerName="mariadb-client-4-default" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.832687 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3b1c7b-28cb-4282-86f9-9d0e6c2d6e09" containerName="mariadb-client-4-default" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.833984 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.837274 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bchvx" Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.846168 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 20:50:29 crc kubenswrapper[4756]: I0930 20:50:29.988587 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zmrn\" (UniqueName: \"kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn\") pod \"mariadb-client-5-default\" (UID: \"09d9aa79-1619-43c6-bffb-c8281e725abe\") " pod="openstack/mariadb-client-5-default" Sep 30 20:50:30 crc kubenswrapper[4756]: I0930 20:50:30.089917 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zmrn\" (UniqueName: \"kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn\") pod \"mariadb-client-5-default\" (UID: \"09d9aa79-1619-43c6-bffb-c8281e725abe\") " pod="openstack/mariadb-client-5-default" Sep 30 20:50:30 crc kubenswrapper[4756]: I0930 20:50:30.113331 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zmrn\" (UniqueName: \"kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn\") pod \"mariadb-client-5-default\" (UID: \"09d9aa79-1619-43c6-bffb-c8281e725abe\") " pod="openstack/mariadb-client-5-default" Sep 30 20:50:30 crc kubenswrapper[4756]: I0930 20:50:30.171973 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 20:50:30 crc kubenswrapper[4756]: I0930 20:50:30.732386 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 20:50:31 crc kubenswrapper[4756]: I0930 20:50:31.428874 4756 generic.go:334] "Generic (PLEG): container finished" podID="09d9aa79-1619-43c6-bffb-c8281e725abe" containerID="f1fbecb04be4c6cf44fc9cc73784021069d0edf0f84fa0f0d024c4fe5fd041e5" exitCode=0 Sep 30 20:50:31 crc kubenswrapper[4756]: I0930 20:50:31.428942 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"09d9aa79-1619-43c6-bffb-c8281e725abe","Type":"ContainerDied","Data":"f1fbecb04be4c6cf44fc9cc73784021069d0edf0f84fa0f0d024c4fe5fd041e5"} Sep 30 20:50:31 crc kubenswrapper[4756]: I0930 20:50:31.429355 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"09d9aa79-1619-43c6-bffb-c8281e725abe","Type":"ContainerStarted","Data":"67b89c27abd0349e88c937c83a9abc43ed1eb4800a12732de3ebb0bb234e84d8"} Sep 30 20:50:32 crc kubenswrapper[4756]: I0930 20:50:32.901402 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 20:50:32 crc kubenswrapper[4756]: I0930 20:50:32.926943 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_09d9aa79-1619-43c6-bffb-c8281e725abe/mariadb-client-5-default/0.log" Sep 30 20:50:32 crc kubenswrapper[4756]: I0930 20:50:32.965947 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 20:50:32 crc kubenswrapper[4756]: I0930 20:50:32.976908 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.044986 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zmrn\" (UniqueName: \"kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn\") pod \"09d9aa79-1619-43c6-bffb-c8281e725abe\" (UID: \"09d9aa79-1619-43c6-bffb-c8281e725abe\") " Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.053639 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn" (OuterVolumeSpecName: "kube-api-access-5zmrn") pod "09d9aa79-1619-43c6-bffb-c8281e725abe" (UID: "09d9aa79-1619-43c6-bffb-c8281e725abe"). InnerVolumeSpecName "kube-api-access-5zmrn". 
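The RemoveStaleState and "Deleted CPUSet assignment" pairs logged on either side of this point are the CPU and memory managers discarding per-container state left behind by the pods just removed. The CPU manager checkpoints that state on disk; a sketch of inspecting it, assuming the default checkpoint path, which is not printed in this log:

import json

# Assumed default kubelet CPU-manager checkpoint path (not from this log).
CHECKPOINT = "/var/lib/kubelet/cpu_manager_state"

with open(CHECKPOINT) as f:
    state = json.load(f)

# "entries" maps pod UID -> container name -> assigned CPU set under the
# static policy; with policyName "none" the map is typically empty.
print("policy:", state.get("policyName"))
for pod_uid, containers in state.get("entries", {}).items():
    for name, cpuset in containers.items():
        print(pod_uid, name, cpuset)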
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.101835 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 20:50:33 crc kubenswrapper[4756]: E0930 20:50:33.102605 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d9aa79-1619-43c6-bffb-c8281e725abe" containerName="mariadb-client-5-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.102647 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d9aa79-1619-43c6-bffb-c8281e725abe" containerName="mariadb-client-5-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.102989 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d9aa79-1619-43c6-bffb-c8281e725abe" containerName="mariadb-client-5-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.104530 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.147896 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zmrn\" (UniqueName: \"kubernetes.io/projected/09d9aa79-1619-43c6-bffb-c8281e725abe-kube-api-access-5zmrn\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.154997 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d9aa79-1619-43c6-bffb-c8281e725abe" path="/var/lib/kubelet/pods/09d9aa79-1619-43c6-bffb-c8281e725abe/volumes" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.155686 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.250280 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqrv6\" (UniqueName: \"kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6\") pod \"mariadb-client-6-default\" (UID: \"2fde716d-73c5-40b4-82c8-460d927ef538\") " pod="openstack/mariadb-client-6-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.352452 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqrv6\" (UniqueName: \"kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6\") pod \"mariadb-client-6-default\" (UID: \"2fde716d-73c5-40b4-82c8-460d927ef538\") " pod="openstack/mariadb-client-6-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.388772 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqrv6\" (UniqueName: \"kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6\") pod \"mariadb-client-6-default\" (UID: \"2fde716d-73c5-40b4-82c8-460d927ef538\") " pod="openstack/mariadb-client-6-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.442168 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.455113 4756 scope.go:117] "RemoveContainer" containerID="f1fbecb04be4c6cf44fc9cc73784021069d0edf0f84fa0f0d024c4fe5fd041e5" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.455137 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 20:50:33 crc kubenswrapper[4756]: I0930 20:50:33.895087 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 20:50:34 crc kubenswrapper[4756]: I0930 20:50:34.468536 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"2fde716d-73c5-40b4-82c8-460d927ef538","Type":"ContainerStarted","Data":"f3794c952da1566d30622cfed27c63f51dfd56be0a96d7295eccd49c2aa1d9c8"} Sep 30 20:50:34 crc kubenswrapper[4756]: I0930 20:50:34.469089 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"2fde716d-73c5-40b4-82c8-460d927ef538","Type":"ContainerStarted","Data":"9daaf5ed4be7a974fa249b04eac3b81f04d746561d3dd3a934b4190f3fa8b1e0"} Sep 30 20:50:34 crc kubenswrapper[4756]: I0930 20:50:34.503444 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.503364489 podStartE2EDuration="1.503364489s" podCreationTimestamp="2025-09-30 20:50:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:50:34.488225694 +0000 UTC m=+4764.109159181" watchObservedRunningTime="2025-09-30 20:50:34.503364489 +0000 UTC m=+4764.124298006" Sep 30 20:50:34 crc kubenswrapper[4756]: I0930 20:50:34.641313 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_2fde716d-73c5-40b4-82c8-460d927ef538/mariadb-client-6-default/0.log" Sep 30 20:50:35 crc kubenswrapper[4756]: I0930 20:50:35.483921 4756 generic.go:334] "Generic (PLEG): container finished" podID="2fde716d-73c5-40b4-82c8-460d927ef538" containerID="f3794c952da1566d30622cfed27c63f51dfd56be0a96d7295eccd49c2aa1d9c8" exitCode=0 Sep 30 20:50:35 crc kubenswrapper[4756]: I0930 20:50:35.484025 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"2fde716d-73c5-40b4-82c8-460d927ef538","Type":"ContainerDied","Data":"f3794c952da1566d30622cfed27c63f51dfd56be0a96d7295eccd49c2aa1d9c8"} Sep 30 20:50:36 crc kubenswrapper[4756]: I0930 20:50:36.961447 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.003585 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.008406 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.121078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqrv6\" (UniqueName: \"kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6\") pod \"2fde716d-73c5-40b4-82c8-460d927ef538\" (UID: \"2fde716d-73c5-40b4-82c8-460d927ef538\") " Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.129776 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6" (OuterVolumeSpecName: "kube-api-access-kqrv6") pod "2fde716d-73c5-40b4-82c8-460d927ef538" (UID: "2fde716d-73c5-40b4-82c8-460d927ef538"). InnerVolumeSpecName "kube-api-access-kqrv6". 
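The pod_startup_latency_tracker entry above for mariadb-client-6-default can be reproduced from its own fields: firstStartedPulling is the zero time, so there is no image-pull window to subtract, and podStartSLOduration is simply watchObservedRunningTime minus podCreationTimestamp. A worked check:

from datetime import datetime, timezone

# Values copied from the pod_startup_latency_tracker entry above for
# openstack/mariadb-client-6-default; nanoseconds are truncated to
# microseconds, which Python's datetime can represent.
created  = datetime(2025, 9, 30, 20, 50, 33, 0,      tzinfo=timezone.utc)
observed = datetime(2025, 9, 30, 20, 50, 34, 503364, tzinfo=timezone.utc)

print((observed - created).total_seconds())  # 1.503364, matching podStartSLOduration=1.503364489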
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.137165 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fde716d-73c5-40b4-82c8-460d927ef538" path="/var/lib/kubelet/pods/2fde716d-73c5-40b4-82c8-460d927ef538/volumes" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.153492 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 20:50:37 crc kubenswrapper[4756]: E0930 20:50:37.154362 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fde716d-73c5-40b4-82c8-460d927ef538" containerName="mariadb-client-6-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.154508 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fde716d-73c5-40b4-82c8-460d927ef538" containerName="mariadb-client-6-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.155041 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fde716d-73c5-40b4-82c8-460d927ef538" containerName="mariadb-client-6-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.156541 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.167713 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.224783 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqrv6\" (UniqueName: \"kubernetes.io/projected/2fde716d-73c5-40b4-82c8-460d927ef538-kube-api-access-kqrv6\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.338159 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mxvz\" (UniqueName: \"kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz\") pod \"mariadb-client-7-default\" (UID: \"31bc98bd-70a0-4146-8065-d199b2d880e4\") " pod="openstack/mariadb-client-7-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.440521 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mxvz\" (UniqueName: \"kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz\") pod \"mariadb-client-7-default\" (UID: \"31bc98bd-70a0-4146-8065-d199b2d880e4\") " pod="openstack/mariadb-client-7-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.475294 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mxvz\" (UniqueName: \"kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz\") pod \"mariadb-client-7-default\" (UID: \"31bc98bd-70a0-4146-8065-d199b2d880e4\") " pod="openstack/mariadb-client-7-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.503475 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.512765 4756 scope.go:117] "RemoveContainer" containerID="f3794c952da1566d30622cfed27c63f51dfd56be0a96d7295eccd49c2aa1d9c8" Sep 30 20:50:37 crc kubenswrapper[4756]: I0930 20:50:37.512872 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 20:50:38 crc kubenswrapper[4756]: I0930 20:50:38.062713 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 20:50:38 crc kubenswrapper[4756]: W0930 20:50:38.069908 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31bc98bd_70a0_4146_8065_d199b2d880e4.slice/crio-46790234ab8b9b1ff603dfb15b3ece57c4d3dea5244ce1fbc1a6295dd75efce3 WatchSource:0}: Error finding container 46790234ab8b9b1ff603dfb15b3ece57c4d3dea5244ce1fbc1a6295dd75efce3: Status 404 returned error can't find the container with id 46790234ab8b9b1ff603dfb15b3ece57c4d3dea5244ce1fbc1a6295dd75efce3 Sep 30 20:50:38 crc kubenswrapper[4756]: I0930 20:50:38.532988 4756 generic.go:334] "Generic (PLEG): container finished" podID="31bc98bd-70a0-4146-8065-d199b2d880e4" containerID="e8e211d1162dc2a4b47a798ac51ef2dab9e075e6b37613f0d83f200e9a52a324" exitCode=0 Sep 30 20:50:38 crc kubenswrapper[4756]: I0930 20:50:38.533281 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"31bc98bd-70a0-4146-8065-d199b2d880e4","Type":"ContainerDied","Data":"e8e211d1162dc2a4b47a798ac51ef2dab9e075e6b37613f0d83f200e9a52a324"} Sep 30 20:50:38 crc kubenswrapper[4756]: I0930 20:50:38.533365 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"31bc98bd-70a0-4146-8065-d199b2d880e4","Type":"ContainerStarted","Data":"46790234ab8b9b1ff603dfb15b3ece57c4d3dea5244ce1fbc1a6295dd75efce3"} Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.117954 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.151138 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_31bc98bd-70a0-4146-8065-d199b2d880e4/mariadb-client-7-default/0.log" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.176572 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.188939 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.293338 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mxvz\" (UniqueName: \"kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz\") pod \"31bc98bd-70a0-4146-8065-d199b2d880e4\" (UID: \"31bc98bd-70a0-4146-8065-d199b2d880e4\") " Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.308703 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz" (OuterVolumeSpecName: "kube-api-access-4mxvz") pod "31bc98bd-70a0-4146-8065-d199b2d880e4" (UID: "31bc98bd-70a0-4146-8065-d199b2d880e4"). InnerVolumeSpecName "kube-api-access-4mxvz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.312099 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Sep 30 20:50:40 crc kubenswrapper[4756]: E0930 20:50:40.312991 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31bc98bd-70a0-4146-8065-d199b2d880e4" containerName="mariadb-client-7-default" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.313008 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="31bc98bd-70a0-4146-8065-d199b2d880e4" containerName="mariadb-client-7-default" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.313383 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="31bc98bd-70a0-4146-8065-d199b2d880e4" containerName="mariadb-client-7-default" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.314268 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.320704 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.403423 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mxvz\" (UniqueName: \"kubernetes.io/projected/31bc98bd-70a0-4146-8065-d199b2d880e4-kube-api-access-4mxvz\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.504940 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlqbj\" (UniqueName: \"kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj\") pod \"mariadb-client-2\" (UID: \"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc\") " pod="openstack/mariadb-client-2" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.559371 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46790234ab8b9b1ff603dfb15b3ece57c4d3dea5244ce1fbc1a6295dd75efce3" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.559455 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.607810 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlqbj\" (UniqueName: \"kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj\") pod \"mariadb-client-2\" (UID: \"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc\") " pod="openstack/mariadb-client-2" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.632012 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlqbj\" (UniqueName: \"kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj\") pod \"mariadb-client-2\" (UID: \"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc\") " pod="openstack/mariadb-client-2" Sep 30 20:50:40 crc kubenswrapper[4756]: I0930 20:50:40.656024 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.127086 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31bc98bd-70a0-4146-8065-d199b2d880e4" path="/var/lib/kubelet/pods/31bc98bd-70a0-4146-8065-d199b2d880e4/volumes" Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.189131 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.539130 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.539524 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.571843 4756 generic.go:334] "Generic (PLEG): container finished" podID="3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" containerID="f028b580346d3179d86ab8bc73003d7cb4cda091d0cba083e47123c0ac085277" exitCode=0 Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.571907 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc","Type":"ContainerDied","Data":"f028b580346d3179d86ab8bc73003d7cb4cda091d0cba083e47123c0ac085277"} Sep 30 20:50:41 crc kubenswrapper[4756]: I0930 20:50:41.571945 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc","Type":"ContainerStarted","Data":"08e303a9e4a5c47c372dfded838914402fd6f4ea3ea8cfbc8f51a2e3cfc9106e"} Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.073682 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.137249 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc/mariadb-client-2/0.log" Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.168874 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.181654 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.252876 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlqbj\" (UniqueName: \"kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj\") pod \"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc\" (UID: \"3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc\") " Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.260602 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj" (OuterVolumeSpecName: "kube-api-access-jlqbj") pod "3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" (UID: "3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc"). InnerVolumeSpecName "kube-api-access-jlqbj". 
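The machine-config-daemon liveness failures here and below (20:50:41, 20:51:11, 20:51:41, each a connection refused on 127.0.0.1:8798/health) arrive thirty seconds apart, and the container is only killed after the third one. A hedged reconstruction of the probe in play: host, path, and port are taken from the probe output above, while the timing fields are inferred from the spacing and an assumed default failureThreshold:

# Hedged sketch of the liveness probe failing above, as a probe spec fragment.
liveness_probe = {
    "httpGet": {"host": "127.0.0.1", "path": "/health", "port": 8798},
    "periodSeconds": 30,    # failures above are 30s apart (20:50:41, 20:51:11, 20:51:41)
    "failureThreshold": 3,  # assumed default; consistent with the kill after the third failure
}
print(liveness_probe)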
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.356106 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlqbj\" (UniqueName: \"kubernetes.io/projected/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc-kube-api-access-jlqbj\") on node \"crc\" DevicePath \"\"" Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.595564 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08e303a9e4a5c47c372dfded838914402fd6f4ea3ea8cfbc8f51a2e3cfc9106e" Sep 30 20:50:43 crc kubenswrapper[4756]: I0930 20:50:43.595682 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 20:50:45 crc kubenswrapper[4756]: I0930 20:50:45.125531 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" path="/var/lib/kubelet/pods/3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc/volumes" Sep 30 20:51:11 crc kubenswrapper[4756]: I0930 20:51:11.539380 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:51:11 crc kubenswrapper[4756]: I0930 20:51:11.540100 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:51:20 crc kubenswrapper[4756]: I0930 20:51:20.114230 4756 scope.go:117] "RemoveContainer" containerID="386db2b3ec8b7c313b33834aa997f6d5d6878ef0731d50eee3a9a33cf3e2c1f0" Sep 30 20:51:41 crc kubenswrapper[4756]: I0930 20:51:41.538962 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:51:41 crc kubenswrapper[4756]: I0930 20:51:41.539721 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:51:41 crc kubenswrapper[4756]: I0930 20:51:41.539778 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:51:41 crc kubenswrapper[4756]: I0930 20:51:41.540842 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:51:41 crc kubenswrapper[4756]: I0930 20:51:41.540931 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" 
containerName="machine-config-daemon" containerID="cri-o://fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831" gracePeriod=600 Sep 30 20:51:42 crc kubenswrapper[4756]: I0930 20:51:42.200217 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831" exitCode=0 Sep 30 20:51:42 crc kubenswrapper[4756]: I0930 20:51:42.200298 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831"} Sep 30 20:51:42 crc kubenswrapper[4756]: I0930 20:51:42.200930 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c"} Sep 30 20:51:42 crc kubenswrapper[4756]: I0930 20:51:42.200968 4756 scope.go:117] "RemoveContainer" containerID="bdd10774ccbb11f8a2293190d3ed532f32596ee6e63b4f323c2f18f8a020bf99" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.457442 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:29 crc kubenswrapper[4756]: E0930 20:53:29.461039 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" containerName="mariadb-client-2" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.461261 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" containerName="mariadb-client-2" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.462096 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e1c4bdd-07a8-4cfd-8e3c-82c5942537cc" containerName="mariadb-client-2" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.474432 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.474711 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.548009 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.548075 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.548148 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7ctc\" (UniqueName: \"kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.649786 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.649840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.649868 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7ctc\" (UniqueName: \"kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.650382 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.650482 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities\") pod \"redhat-operators-gtbpz\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.669878 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7ctc\" (UniqueName: \"kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc\") pod \"redhat-operators-gtbpz\" (UID: 
\"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:29 crc kubenswrapper[4756]: I0930 20:53:29.807919 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:30 crc kubenswrapper[4756]: I0930 20:53:30.289602 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:30 crc kubenswrapper[4756]: I0930 20:53:30.327047 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerStarted","Data":"1784694c10cb3bfe267e38a0b0442e91f06bb62102c94c7bc3e751b8e1b39490"} Sep 30 20:53:31 crc kubenswrapper[4756]: I0930 20:53:31.339740 4756 generic.go:334] "Generic (PLEG): container finished" podID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerID="a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282" exitCode=0 Sep 30 20:53:31 crc kubenswrapper[4756]: I0930 20:53:31.339876 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerDied","Data":"a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282"} Sep 30 20:53:31 crc kubenswrapper[4756]: I0930 20:53:31.344108 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:53:33 crc kubenswrapper[4756]: I0930 20:53:33.355979 4756 generic.go:334] "Generic (PLEG): container finished" podID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerID="73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7" exitCode=0 Sep 30 20:53:33 crc kubenswrapper[4756]: I0930 20:53:33.356072 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerDied","Data":"73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7"} Sep 30 20:53:34 crc kubenswrapper[4756]: I0930 20:53:34.368499 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerStarted","Data":"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a"} Sep 30 20:53:34 crc kubenswrapper[4756]: I0930 20:53:34.400730 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gtbpz" podStartSLOduration=2.965846215 podStartE2EDuration="5.400708132s" podCreationTimestamp="2025-09-30 20:53:29 +0000 UTC" firstStartedPulling="2025-09-30 20:53:31.342978526 +0000 UTC m=+4940.963912013" lastFinishedPulling="2025-09-30 20:53:33.777840413 +0000 UTC m=+4943.398773930" observedRunningTime="2025-09-30 20:53:34.394939622 +0000 UTC m=+4944.015873129" watchObservedRunningTime="2025-09-30 20:53:34.400708132 +0000 UTC m=+4944.021641619" Sep 30 20:53:39 crc kubenswrapper[4756]: I0930 20:53:39.808804 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:39 crc kubenswrapper[4756]: I0930 20:53:39.809327 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:39 crc kubenswrapper[4756]: I0930 20:53:39.874444 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:40 crc kubenswrapper[4756]: I0930 20:53:40.486893 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:40 crc kubenswrapper[4756]: I0930 20:53:40.535675 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:41 crc kubenswrapper[4756]: I0930 20:53:41.540087 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:53:41 crc kubenswrapper[4756]: I0930 20:53:41.540689 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.435836 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gtbpz" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="registry-server" containerID="cri-o://308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a" gracePeriod=2 Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.888110 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.963134 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities\") pod \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.963191 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7ctc\" (UniqueName: \"kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc\") pod \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.963376 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content\") pod \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\" (UID: \"4ee74d6a-0714-42b4-b3ce-734e4b0ea789\") " Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.964683 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities" (OuterVolumeSpecName: "utilities") pod "4ee74d6a-0714-42b4-b3ce-734e4b0ea789" (UID: "4ee74d6a-0714-42b4-b3ce-734e4b0ea789"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:53:42 crc kubenswrapper[4756]: I0930 20:53:42.987831 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc" (OuterVolumeSpecName: "kube-api-access-g7ctc") pod "4ee74d6a-0714-42b4-b3ce-734e4b0ea789" (UID: "4ee74d6a-0714-42b4-b3ce-734e4b0ea789"). InnerVolumeSpecName "kube-api-access-g7ctc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.065465 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.065497 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7ctc\" (UniqueName: \"kubernetes.io/projected/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-kube-api-access-g7ctc\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.446799 4756 generic.go:334] "Generic (PLEG): container finished" podID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerID="308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a" exitCode=0 Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.446842 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerDied","Data":"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a"} Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.446874 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtbpz" event={"ID":"4ee74d6a-0714-42b4-b3ce-734e4b0ea789","Type":"ContainerDied","Data":"1784694c10cb3bfe267e38a0b0442e91f06bb62102c94c7bc3e751b8e1b39490"} Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.446895 4756 scope.go:117] "RemoveContainer" containerID="308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.447029 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gtbpz" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.468598 4756 scope.go:117] "RemoveContainer" containerID="73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.490494 4756 scope.go:117] "RemoveContainer" containerID="a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.545293 4756 scope.go:117] "RemoveContainer" containerID="308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a" Sep 30 20:53:43 crc kubenswrapper[4756]: E0930 20:53:43.545973 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a\": container with ID starting with 308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a not found: ID does not exist" containerID="308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.546040 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a"} err="failed to get container status \"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a\": rpc error: code = NotFound desc = could not find container \"308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a\": container with ID starting with 308c110eeab0c64f27f55c55549ebcf6eab763e5a353d75a163fcdd078b16b0a not found: ID does not exist" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.546079 4756 scope.go:117] "RemoveContainer" containerID="73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7" Sep 30 20:53:43 crc kubenswrapper[4756]: E0930 20:53:43.546681 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7\": container with ID starting with 73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7 not found: ID does not exist" containerID="73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.546737 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7"} err="failed to get container status \"73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7\": rpc error: code = NotFound desc = could not find container \"73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7\": container with ID starting with 73634905bd12bb05cb9f7ef48c7eaf7b05b59169f6e3c2d08967f177ad572bc7 not found: ID does not exist" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.546771 4756 scope.go:117] "RemoveContainer" containerID="a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282" Sep 30 20:53:43 crc kubenswrapper[4756]: E0930 20:53:43.547605 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282\": container with ID starting with a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282 not found: ID does not exist" containerID="a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282" 
Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.547643 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282"} err="failed to get container status \"a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282\": rpc error: code = NotFound desc = could not find container \"a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282\": container with ID starting with a704e944958e91bfd22e4ee3cd68ca9fd5e53703d38ba0ac0bbe8d07ea6fb282 not found: ID does not exist" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.974983 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ee74d6a-0714-42b4-b3ce-734e4b0ea789" (UID: "4ee74d6a-0714-42b4-b3ce-734e4b0ea789"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:53:43 crc kubenswrapper[4756]: I0930 20:53:43.977949 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ee74d6a-0714-42b4-b3ce-734e4b0ea789-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:44 crc kubenswrapper[4756]: I0930 20:53:44.079320 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:44 crc kubenswrapper[4756]: I0930 20:53:44.085755 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gtbpz"] Sep 30 20:53:45 crc kubenswrapper[4756]: I0930 20:53:45.138296 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" path="/var/lib/kubelet/pods/4ee74d6a-0714-42b4-b3ce-734e4b0ea789/volumes" Sep 30 20:54:11 crc kubenswrapper[4756]: I0930 20:54:11.539523 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:54:11 crc kubenswrapper[4756]: I0930 20:54:11.540041 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:54:41 crc kubenswrapper[4756]: I0930 20:54:41.539213 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:54:41 crc kubenswrapper[4756]: I0930 20:54:41.540621 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:54:41 crc kubenswrapper[4756]: I0930 20:54:41.540719 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 20:54:41 crc kubenswrapper[4756]: I0930 20:54:41.541858 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:54:41 crc kubenswrapper[4756]: I0930 20:54:41.541970 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" gracePeriod=600 Sep 30 20:54:41 crc kubenswrapper[4756]: E0930 20:54:41.674930 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:54:42 crc kubenswrapper[4756]: I0930 20:54:42.019849 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" exitCode=0 Sep 30 20:54:42 crc kubenswrapper[4756]: I0930 20:54:42.019936 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c"} Sep 30 20:54:42 crc kubenswrapper[4756]: I0930 20:54:42.020001 4756 scope.go:117] "RemoveContainer" containerID="fd9fa9739b840f2adab14d744818d1716aa3d12ebd65d535d5bd7108701e2831" Sep 30 20:54:42 crc kubenswrapper[4756]: I0930 20:54:42.021056 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:54:42 crc kubenswrapper[4756]: E0930 20:54:42.021809 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:54:53 crc kubenswrapper[4756]: I0930 20:54:53.117225 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:54:53 crc kubenswrapper[4756]: E0930 20:54:53.118162 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 
20:55:03.138919 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 20:55:03 crc kubenswrapper[4756]: E0930 20:55:03.140312 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="registry-server" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.140345 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="registry-server" Sep 30 20:55:03 crc kubenswrapper[4756]: E0930 20:55:03.140381 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="extract-utilities" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.140426 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="extract-utilities" Sep 30 20:55:03 crc kubenswrapper[4756]: E0930 20:55:03.140459 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="extract-content" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.140482 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="extract-content" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.140820 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ee74d6a-0714-42b4-b3ce-734e4b0ea789" containerName="registry-server" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.141761 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.145218 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bchvx" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.152997 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.314242 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.314351 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktjxj\" (UniqueName: \"kubernetes.io/projected/68468641-7a26-4d0b-8386-ed0a3d2280f3-kube-api-access-ktjxj\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.415779 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.415837 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktjxj\" (UniqueName: \"kubernetes.io/projected/68468641-7a26-4d0b-8386-ed0a3d2280f3-kube-api-access-ktjxj\") pod \"mariadb-copy-data\" (UID: 
\"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.419388 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.419464 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d36a589207d136061d303ed0b716c6b34dee6180c7c9efdebf4af442e48a11e5/globalmount\"" pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.461873 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktjxj\" (UniqueName: \"kubernetes.io/projected/68468641-7a26-4d0b-8386-ed0a3d2280f3-kube-api-access-ktjxj\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.467972 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88892d1c-870a-4f4c-8e84-60dcaef0a345\") pod \"mariadb-copy-data\" (UID: \"68468641-7a26-4d0b-8386-ed0a3d2280f3\") " pod="openstack/mariadb-copy-data" Sep 30 20:55:03 crc kubenswrapper[4756]: I0930 20:55:03.769642 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Sep 30 20:55:04 crc kubenswrapper[4756]: I0930 20:55:04.266695 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 20:55:04 crc kubenswrapper[4756]: W0930 20:55:04.272758 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68468641_7a26_4d0b_8386_ed0a3d2280f3.slice/crio-b95166e78bf97906ae1e749ce73d7e64f7a67bf5772a78dac08d8c51bc389f18 WatchSource:0}: Error finding container b95166e78bf97906ae1e749ce73d7e64f7a67bf5772a78dac08d8c51bc389f18: Status 404 returned error can't find the container with id b95166e78bf97906ae1e749ce73d7e64f7a67bf5772a78dac08d8c51bc389f18 Sep 30 20:55:05 crc kubenswrapper[4756]: I0930 20:55:05.237344 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"68468641-7a26-4d0b-8386-ed0a3d2280f3","Type":"ContainerStarted","Data":"759e2b0d0861ad565c9d729ebde9b9e1461ee4c9b7eebf079ab06e73f9701226"} Sep 30 20:55:05 crc kubenswrapper[4756]: I0930 20:55:05.237682 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"68468641-7a26-4d0b-8386-ed0a3d2280f3","Type":"ContainerStarted","Data":"b95166e78bf97906ae1e749ce73d7e64f7a67bf5772a78dac08d8c51bc389f18"} Sep 30 20:55:05 crc kubenswrapper[4756]: I0930 20:55:05.252316 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.252297589 podStartE2EDuration="3.252297589s" podCreationTimestamp="2025-09-30 20:55:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:55:05.249120896 +0000 
UTC m=+5034.870054373" watchObservedRunningTime="2025-09-30 20:55:05.252297589 +0000 UTC m=+5034.873231066" Sep 30 20:55:06 crc kubenswrapper[4756]: I0930 20:55:06.116670 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:55:06 crc kubenswrapper[4756]: E0930 20:55:06.117170 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:06 crc kubenswrapper[4756]: I0930 20:55:06.999575 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.001356 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.018174 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.073064 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzwvk\" (UniqueName: \"kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk\") pod \"mariadb-client\" (UID: \"b14018b5-aae0-49dd-90d3-9e4557f34a48\") " pod="openstack/mariadb-client" Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.175681 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzwvk\" (UniqueName: \"kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk\") pod \"mariadb-client\" (UID: \"b14018b5-aae0-49dd-90d3-9e4557f34a48\") " pod="openstack/mariadb-client" Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.205108 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzwvk\" (UniqueName: \"kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk\") pod \"mariadb-client\" (UID: \"b14018b5-aae0-49dd-90d3-9e4557f34a48\") " pod="openstack/mariadb-client" Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.349582 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:07 crc kubenswrapper[4756]: I0930 20:55:07.829899 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:07 crc kubenswrapper[4756]: W0930 20:55:07.833640 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb14018b5_aae0_49dd_90d3_9e4557f34a48.slice/crio-64223d87048742d7ba9b0e6d5a677a9c13e09926bdc99727da31aff5672655af WatchSource:0}: Error finding container 64223d87048742d7ba9b0e6d5a677a9c13e09926bdc99727da31aff5672655af: Status 404 returned error can't find the container with id 64223d87048742d7ba9b0e6d5a677a9c13e09926bdc99727da31aff5672655af Sep 30 20:55:08 crc kubenswrapper[4756]: I0930 20:55:08.263638 4756 generic.go:334] "Generic (PLEG): container finished" podID="b14018b5-aae0-49dd-90d3-9e4557f34a48" containerID="0f9a3dcf9b4cf9ab232985a3aa39a5f486136192d8b1349cd04275faa7d9c32b" exitCode=0 Sep 30 20:55:08 crc kubenswrapper[4756]: I0930 20:55:08.263736 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"b14018b5-aae0-49dd-90d3-9e4557f34a48","Type":"ContainerDied","Data":"0f9a3dcf9b4cf9ab232985a3aa39a5f486136192d8b1349cd04275faa7d9c32b"} Sep 30 20:55:08 crc kubenswrapper[4756]: I0930 20:55:08.263879 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"b14018b5-aae0-49dd-90d3-9e4557f34a48","Type":"ContainerStarted","Data":"64223d87048742d7ba9b0e6d5a677a9c13e09926bdc99727da31aff5672655af"} Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.674629 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.701556 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_b14018b5-aae0-49dd-90d3-9e4557f34a48/mariadb-client/0.log" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.733980 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.751385 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.817846 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzwvk\" (UniqueName: \"kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk\") pod \"b14018b5-aae0-49dd-90d3-9e4557f34a48\" (UID: \"b14018b5-aae0-49dd-90d3-9e4557f34a48\") " Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.825507 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk" (OuterVolumeSpecName: "kube-api-access-qzwvk") pod "b14018b5-aae0-49dd-90d3-9e4557f34a48" (UID: "b14018b5-aae0-49dd-90d3-9e4557f34a48"). InnerVolumeSpecName "kube-api-access-qzwvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.860900 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:09 crc kubenswrapper[4756]: E0930 20:55:09.861280 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b14018b5-aae0-49dd-90d3-9e4557f34a48" containerName="mariadb-client" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.861305 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b14018b5-aae0-49dd-90d3-9e4557f34a48" containerName="mariadb-client" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.861522 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b14018b5-aae0-49dd-90d3-9e4557f34a48" containerName="mariadb-client" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.862263 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.866918 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:09 crc kubenswrapper[4756]: I0930 20:55:09.919835 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzwvk\" (UniqueName: \"kubernetes.io/projected/b14018b5-aae0-49dd-90d3-9e4557f34a48-kube-api-access-qzwvk\") on node \"crc\" DevicePath \"\"" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.021211 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nldn\" (UniqueName: \"kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn\") pod \"mariadb-client\" (UID: \"595b7feb-47ec-469e-91e2-a4ab170dfd17\") " pod="openstack/mariadb-client" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.122892 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nldn\" (UniqueName: \"kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn\") pod \"mariadb-client\" (UID: \"595b7feb-47ec-469e-91e2-a4ab170dfd17\") " pod="openstack/mariadb-client" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.156345 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nldn\" (UniqueName: \"kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn\") pod \"mariadb-client\" (UID: \"595b7feb-47ec-469e-91e2-a4ab170dfd17\") " pod="openstack/mariadb-client" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.209315 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.281922 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64223d87048742d7ba9b0e6d5a677a9c13e09926bdc99727da31aff5672655af" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.281993 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.312126 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="b14018b5-aae0-49dd-90d3-9e4557f34a48" podUID="595b7feb-47ec-469e-91e2-a4ab170dfd17" Sep 30 20:55:10 crc kubenswrapper[4756]: I0930 20:55:10.636247 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:10 crc kubenswrapper[4756]: W0930 20:55:10.636891 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod595b7feb_47ec_469e_91e2_a4ab170dfd17.slice/crio-03de9128474d99791847ce03a857c36af56c73a8bfeac67b0861233854ba492a WatchSource:0}: Error finding container 03de9128474d99791847ce03a857c36af56c73a8bfeac67b0861233854ba492a: Status 404 returned error can't find the container with id 03de9128474d99791847ce03a857c36af56c73a8bfeac67b0861233854ba492a Sep 30 20:55:11 crc kubenswrapper[4756]: I0930 20:55:11.138345 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b14018b5-aae0-49dd-90d3-9e4557f34a48" path="/var/lib/kubelet/pods/b14018b5-aae0-49dd-90d3-9e4557f34a48/volumes" Sep 30 20:55:11 crc kubenswrapper[4756]: I0930 20:55:11.293013 4756 generic.go:334] "Generic (PLEG): container finished" podID="595b7feb-47ec-469e-91e2-a4ab170dfd17" containerID="9dff59cddd5c40bfe30f8443845e8cb6346640e3c6219dc5a72090b89979ba7f" exitCode=0 Sep 30 20:55:11 crc kubenswrapper[4756]: I0930 20:55:11.293093 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"595b7feb-47ec-469e-91e2-a4ab170dfd17","Type":"ContainerDied","Data":"9dff59cddd5c40bfe30f8443845e8cb6346640e3c6219dc5a72090b89979ba7f"} Sep 30 20:55:11 crc kubenswrapper[4756]: I0930 20:55:11.293135 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"595b7feb-47ec-469e-91e2-a4ab170dfd17","Type":"ContainerStarted","Data":"03de9128474d99791847ce03a857c36af56c73a8bfeac67b0861233854ba492a"} Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.625115 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.640932 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_595b7feb-47ec-469e-91e2-a4ab170dfd17/mariadb-client/0.log" Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.664234 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.669016 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.763935 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nldn\" (UniqueName: \"kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn\") pod \"595b7feb-47ec-469e-91e2-a4ab170dfd17\" (UID: \"595b7feb-47ec-469e-91e2-a4ab170dfd17\") " Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.771560 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn" (OuterVolumeSpecName: "kube-api-access-2nldn") pod "595b7feb-47ec-469e-91e2-a4ab170dfd17" (UID: "595b7feb-47ec-469e-91e2-a4ab170dfd17"). 
InnerVolumeSpecName "kube-api-access-2nldn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:55:12 crc kubenswrapper[4756]: I0930 20:55:12.865493 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nldn\" (UniqueName: \"kubernetes.io/projected/595b7feb-47ec-469e-91e2-a4ab170dfd17-kube-api-access-2nldn\") on node \"crc\" DevicePath \"\"" Sep 30 20:55:13 crc kubenswrapper[4756]: I0930 20:55:13.129112 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="595b7feb-47ec-469e-91e2-a4ab170dfd17" path="/var/lib/kubelet/pods/595b7feb-47ec-469e-91e2-a4ab170dfd17/volumes" Sep 30 20:55:13 crc kubenswrapper[4756]: I0930 20:55:13.311986 4756 scope.go:117] "RemoveContainer" containerID="9dff59cddd5c40bfe30f8443845e8cb6346640e3c6219dc5a72090b89979ba7f" Sep 30 20:55:13 crc kubenswrapper[4756]: I0930 20:55:13.312058 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 30 20:55:17 crc kubenswrapper[4756]: I0930 20:55:17.116465 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:55:17 crc kubenswrapper[4756]: E0930 20:55:17.117160 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:29 crc kubenswrapper[4756]: I0930 20:55:29.116463 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:55:29 crc kubenswrapper[4756]: E0930 20:55:29.117235 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:41 crc kubenswrapper[4756]: I0930 20:55:41.119695 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:55:41 crc kubenswrapper[4756]: E0930 20:55:41.120422 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:56 crc kubenswrapper[4756]: I0930 20:55:56.116268 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:55:56 crc kubenswrapper[4756]: E0930 20:55:56.117749 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.629853 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:55:58 crc kubenswrapper[4756]: E0930 20:55:58.630518 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595b7feb-47ec-469e-91e2-a4ab170dfd17" containerName="mariadb-client" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.630536 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="595b7feb-47ec-469e-91e2-a4ab170dfd17" containerName="mariadb-client" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.630751 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="595b7feb-47ec-469e-91e2-a4ab170dfd17" containerName="mariadb-client" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.631814 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.634255 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-qx9k6" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.635014 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.635064 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.650375 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.663275 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.682617 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.682946 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.696012 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.696077 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.696164 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.733975 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.734135 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5f25614a-c082-45ed-8da7-7bb081938826\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5f25614a-c082-45ed-8da7-7bb081938826\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.734177 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-config\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.734212 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dchp6\" (UniqueName: \"kubernetes.io/projected/e3a83234-34b6-4c4c-8150-e85b357a7249-kube-api-access-dchp6\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.734282 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a83234-34b6-4c4c-8150-e85b357a7249-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.734528 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3a83234-34b6-4c4c-8150-e85b357a7249-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.835487 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.835765 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5f25614a-c082-45ed-8da7-7bb081938826\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5f25614a-c082-45ed-8da7-7bb081938826\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.835917 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-config\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc 
kubenswrapper[4756]: I0930 20:55:58.836030 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836074 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dchp6\" (UniqueName: \"kubernetes.io/projected/e3a83234-34b6-4c4c-8150-e85b357a7249-kube-api-access-dchp6\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836160 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a83234-34b6-4c4c-8150-e85b357a7249-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836206 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63cc3cb7-a06a-43d3-8963-11254e723416-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836258 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m265\" (UniqueName: \"kubernetes.io/projected/63cc3cb7-a06a-43d3-8963-11254e723416-kube-api-access-5m265\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836370 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836426 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63cc3cb7-a06a-43d3-8963-11254e723416-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836481 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-config\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836498 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-config\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836515 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836551 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836579 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3a83234-34b6-4c4c-8150-e85b357a7249-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836602 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836648 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.836671 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf4cq\" (UniqueName: \"kubernetes.io/projected/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-kube-api-access-wf4cq\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.837897 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3a83234-34b6-4c4c-8150-e85b357a7249-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.838021 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.835525 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.838688 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3a83234-34b6-4c4c-8150-e85b357a7249-config\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.839881 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.842322 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-jkkbr" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.842546 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.842873 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.845726 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.845838 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5f25614a-c082-45ed-8da7-7bb081938826\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5f25614a-c082-45ed-8da7-7bb081938826\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/662b72e588c53ad18660721aa6e78b0105d6de9f5c6703d9849830ae6f5cc3ca/globalmount\"" pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.850176 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.851747 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.856933 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a83234-34b6-4c4c-8150-e85b357a7249-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.858152 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.867444 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.869685 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.874533 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.877260 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dchp6\" (UniqueName: \"kubernetes.io/projected/e3a83234-34b6-4c4c-8150-e85b357a7249-kube-api-access-dchp6\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.882593 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.888949 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5f25614a-c082-45ed-8da7-7bb081938826\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5f25614a-c082-45ed-8da7-7bb081938826\") pod \"ovsdbserver-nb-0\" (UID: \"e3a83234-34b6-4c4c-8150-e85b357a7249\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940339 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m265\" (UniqueName: \"kubernetes.io/projected/63cc3cb7-a06a-43d3-8963-11254e723416-kube-api-access-5m265\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940380 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940431 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk68x\" (UniqueName: \"kubernetes.io/projected/9d1172a6-25c7-4141-a33a-0cb6e7532927-kube-api-access-jk68x\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940455 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-config\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940574 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940625 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46c3457-8a08-4958-9b56-1583c6d5dc99-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940656 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940694 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63cc3cb7-a06a-43d3-8963-11254e723416-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940728 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940768 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-config\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940794 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d1172a6-25c7-4141-a33a-0cb6e7532927-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940825 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-config\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940847 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-config\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940870 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940906 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940930 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9d1172a6-25c7-4141-a33a-0cb6e7532927-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940985 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1765ebd7-500e-4d06-8354-7121b40002ad-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.940995 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941014 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vgdb\" (UniqueName: \"kubernetes.io/projected/1765ebd7-500e-4d06-8354-7121b40002ad-kube-api-access-4vgdb\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941478 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf4cq\" (UniqueName: \"kubernetes.io/projected/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-kube-api-access-wf4cq\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941540 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941719 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-config\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941768 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.941802 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: 
I0930 20:55:58.942064 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.942096 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1765ebd7-500e-4d06-8354-7121b40002ad-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.942159 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.942428 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63cc3cb7-a06a-43d3-8963-11254e723416-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.942450 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a46c3457-8a08-4958-9b56-1583c6d5dc99-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.942693 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8x56\" (UniqueName: \"kubernetes.io/projected/a46c3457-8a08-4958-9b56-1583c6d5dc99-kube-api-access-t8x56\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.943752 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-config\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.946965 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.950171 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63cc3cb7-a06a-43d3-8963-11254e723416-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.958196 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m265\" (UniqueName: 
\"kubernetes.io/projected/63cc3cb7-a06a-43d3-8963-11254e723416-kube-api-access-5m265\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.965210 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63cc3cb7-a06a-43d3-8963-11254e723416-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.966105 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-config\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.966536 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.966922 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.966957 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1eb317e24f968d6e4afe18825c98340e65551f99b06426a144747fc36d2e5772/globalmount\"" pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.967010 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.967048 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cb97c8cbb45765109518f1fbfab645e5969257f57f2042d39ab6d1fa0ee03d59/globalmount\"" pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.967346 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63cc3cb7-a06a-43d3-8963-11254e723416-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.982656 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.988126 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf4cq\" (UniqueName: \"kubernetes.io/projected/97be13c3-6dba-47c2-91dc-b14cdc6c45dc-kube-api-access-wf4cq\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.996931 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dc929a24-455c-4577-9114-cabdda8ab9a3\") pod \"ovsdbserver-nb-2\" (UID: \"63cc3cb7-a06a-43d3-8963-11254e723416\") " pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:58 crc kubenswrapper[4756]: I0930 20:55:58.998777 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cb7623cf-708f-43e7-ab11-fca1109a995f\") pod \"ovsdbserver-nb-1\" (UID: \"97be13c3-6dba-47c2-91dc-b14cdc6c45dc\") " pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.015467 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.029035 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063608 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063646 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1765ebd7-500e-4d06-8354-7121b40002ad-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063697 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063718 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a46c3457-8a08-4958-9b56-1583c6d5dc99-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063736 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8x56\" (UniqueName: \"kubernetes.io/projected/a46c3457-8a08-4958-9b56-1583c6d5dc99-kube-api-access-t8x56\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063758 4756 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063777 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk68x\" (UniqueName: \"kubernetes.io/projected/9d1172a6-25c7-4141-a33a-0cb6e7532927-kube-api-access-jk68x\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063796 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-config\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063814 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063832 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46c3457-8a08-4958-9b56-1583c6d5dc99-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063858 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063880 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d1172a6-25c7-4141-a33a-0cb6e7532927-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063895 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-config\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063917 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1172a6-25c7-4141-a33a-0cb6e7532927-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063937 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1765ebd7-500e-4d06-8354-7121b40002ad-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: 
\"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063955 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vgdb\" (UniqueName: \"kubernetes.io/projected/1765ebd7-500e-4d06-8354-7121b40002ad-kube-api-access-4vgdb\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063978 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.063996 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-config\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.064607 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.064781 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-config\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.065955 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d1172a6-25c7-4141-a33a-0cb6e7532927-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.066351 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a46c3457-8a08-4958-9b56-1583c6d5dc99-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.067625 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a46c3457-8a08-4958-9b56-1583c6d5dc99-config\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.067773 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1765ebd7-500e-4d06-8354-7121b40002ad-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.068319 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d1172a6-25c7-4141-a33a-0cb6e7532927-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " 
pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069248 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1765ebd7-500e-4d06-8354-7121b40002ad-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069277 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069312 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/16157f58e2c9a589b69cfcd99c99ecdbf45e7bec1950910cc2e8dff002f3c789/globalmount\"" pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069413 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069443 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7a8a5b98527a9ca39f9b280aec865032fd2e50c1a539e377a734d8a6ef616143/globalmount\"" pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069630 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-config\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069738 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.069767 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9dc63242ff6c2624cae9a065849d7f37cd18c5ad46fc752ede3b037cdb9923ad/globalmount\"" pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.070950 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1765ebd7-500e-4d06-8354-7121b40002ad-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.071365 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1172a6-25c7-4141-a33a-0cb6e7532927-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.071388 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46c3457-8a08-4958-9b56-1583c6d5dc99-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.089582 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8x56\" (UniqueName: \"kubernetes.io/projected/a46c3457-8a08-4958-9b56-1583c6d5dc99-kube-api-access-t8x56\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.090778 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk68x\" (UniqueName: \"kubernetes.io/projected/9d1172a6-25c7-4141-a33a-0cb6e7532927-kube-api-access-jk68x\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.098280 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vgdb\" (UniqueName: \"kubernetes.io/projected/1765ebd7-500e-4d06-8354-7121b40002ad-kube-api-access-4vgdb\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.124988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d54e41d0-1007-4e33-b3d3-27dd6dcf901e\") pod \"ovsdbserver-sb-0\" (UID: \"a46c3457-8a08-4958-9b56-1583c6d5dc99\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.129773 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b42cad44-bf45-43f1-9ba4-c91b386e0768\") pod \"ovsdbserver-sb-1\" (UID: \"1765ebd7-500e-4d06-8354-7121b40002ad\") " 
pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.131994 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68f0f38e-6807-41d6-9e4b-0539be5c9007\") pod \"ovsdbserver-sb-2\" (UID: \"9d1172a6-25c7-4141-a33a-0cb6e7532927\") " pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.222206 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.231586 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.238038 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.534585 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.671272 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 30 20:55:59 crc kubenswrapper[4756]: W0930 20:55:59.674189 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97be13c3_6dba_47c2_91dc_b14cdc6c45dc.slice/crio-5d88f60d50a815bfdae13468572cba57d298282f6b145dfed4e6466dcb604662 WatchSource:0}: Error finding container 5d88f60d50a815bfdae13468572cba57d298282f6b145dfed4e6466dcb604662: Status 404 returned error can't find the container with id 5d88f60d50a815bfdae13468572cba57d298282f6b145dfed4e6466dcb604662 Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.737878 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"97be13c3-6dba-47c2-91dc-b14cdc6c45dc","Type":"ContainerStarted","Data":"5d88f60d50a815bfdae13468572cba57d298282f6b145dfed4e6466dcb604662"} Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.741976 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e3a83234-34b6-4c4c-8150-e85b357a7249","Type":"ContainerStarted","Data":"02fd5067c1bb5370ba9a2e0db2b6e0277b11007293a232ab41a90d0064605522"} Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.782049 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:55:59 crc kubenswrapper[4756]: W0930 20:55:59.786526 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda46c3457_8a08_4958_9b56_1583c6d5dc99.slice/crio-a68dc2335035e3cf226390efb68a75d076aaecee51fa0ab184c3756fe9492c18 WatchSource:0}: Error finding container a68dc2335035e3cf226390efb68a75d076aaecee51fa0ab184c3756fe9492c18: Status 404 returned error can't find the container with id a68dc2335035e3cf226390efb68a75d076aaecee51fa0ab184c3756fe9492c18 Sep 30 20:55:59 crc kubenswrapper[4756]: I0930 20:55:59.887672 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 30 20:55:59 crc kubenswrapper[4756]: W0930 20:55:59.895775 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1765ebd7_500e_4d06_8354_7121b40002ad.slice/crio-bdf10c92b0f877bcd7386c6c2f3fe16a1d6f1e74c6ec718f4c03ad3927565cd6 
WatchSource:0}: Error finding container bdf10c92b0f877bcd7386c6c2f3fe16a1d6f1e74c6ec718f4c03ad3927565cd6: Status 404 returned error can't find the container with id bdf10c92b0f877bcd7386c6c2f3fe16a1d6f1e74c6ec718f4c03ad3927565cd6 Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.382985 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 30 20:56:00 crc kubenswrapper[4756]: W0930 20:56:00.384925 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63cc3cb7_a06a_43d3_8963_11254e723416.slice/crio-413afb7d87c9741bb05bb5bf2ee2323f8f1c5246414d0d5bf16fc321ddb27e50 WatchSource:0}: Error finding container 413afb7d87c9741bb05bb5bf2ee2323f8f1c5246414d0d5bf16fc321ddb27e50: Status 404 returned error can't find the container with id 413afb7d87c9741bb05bb5bf2ee2323f8f1c5246414d0d5bf16fc321ddb27e50 Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.670243 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 30 20:56:00 crc kubenswrapper[4756]: W0930 20:56:00.676770 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d1172a6_25c7_4141_a33a_0cb6e7532927.slice/crio-8ca53b5844bafe4234a808726e153419218efa9c19286e18298dd658008cefae WatchSource:0}: Error finding container 8ca53b5844bafe4234a808726e153419218efa9c19286e18298dd658008cefae: Status 404 returned error can't find the container with id 8ca53b5844bafe4234a808726e153419218efa9c19286e18298dd658008cefae Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.754412 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"9d1172a6-25c7-4141-a33a-0cb6e7532927","Type":"ContainerStarted","Data":"8ca53b5844bafe4234a808726e153419218efa9c19286e18298dd658008cefae"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.756352 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"97be13c3-6dba-47c2-91dc-b14cdc6c45dc","Type":"ContainerStarted","Data":"577ceb15dc81d80587eb82368d6a51ea2cf3fcad3c3fef89e9afcbef854ede58"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.756414 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"97be13c3-6dba-47c2-91dc-b14cdc6c45dc","Type":"ContainerStarted","Data":"addbd5f907d658455c8e2a3eaa728d4b7b70a9a1e8fb78055360bf9fc83866a8"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.758064 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"1765ebd7-500e-4d06-8354-7121b40002ad","Type":"ContainerStarted","Data":"b07dd73a340a0e46adb6b105a61b98d7a8bcc34fc3977727cbb19e0480d54d57"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.758100 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"1765ebd7-500e-4d06-8354-7121b40002ad","Type":"ContainerStarted","Data":"984a5310d06cfb5234578a5e9826eb1dcbc6b10800bd94a3b981be6a30ca94f8"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.758111 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"1765ebd7-500e-4d06-8354-7121b40002ad","Type":"ContainerStarted","Data":"bdf10c92b0f877bcd7386c6c2f3fe16a1d6f1e74c6ec718f4c03ad3927565cd6"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.760167 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"a46c3457-8a08-4958-9b56-1583c6d5dc99","Type":"ContainerStarted","Data":"7ca16911de9ec004d200d01aceddd799d456841e0b63e6313dd4a48bce382d5d"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.760203 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a46c3457-8a08-4958-9b56-1583c6d5dc99","Type":"ContainerStarted","Data":"dafca9d4de40217b3719ef494f5ee72562c07519c05f5a0b643725aab1acd973"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.760215 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a46c3457-8a08-4958-9b56-1583c6d5dc99","Type":"ContainerStarted","Data":"a68dc2335035e3cf226390efb68a75d076aaecee51fa0ab184c3756fe9492c18"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.762167 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"63cc3cb7-a06a-43d3-8963-11254e723416","Type":"ContainerStarted","Data":"8e7055744803b3f8ad9c1b8fa333a88542f067d56229d4c83469b979d817ea55"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.762200 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"63cc3cb7-a06a-43d3-8963-11254e723416","Type":"ContainerStarted","Data":"bfa85c8da3f6593cc0dc30361159d4c608f118c839ca75f9fb29b67475a9379c"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.762210 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"63cc3cb7-a06a-43d3-8963-11254e723416","Type":"ContainerStarted","Data":"413afb7d87c9741bb05bb5bf2ee2323f8f1c5246414d0d5bf16fc321ddb27e50"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.763888 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e3a83234-34b6-4c4c-8150-e85b357a7249","Type":"ContainerStarted","Data":"36e1dc8172757f8eff3d1e7c688ffb855165c67e79500a48d9f9fc9edb8f5e48"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.763912 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e3a83234-34b6-4c4c-8150-e85b357a7249","Type":"ContainerStarted","Data":"dcec789604ae746047d82210e4e83c80341b2b7df712efc100850168c282f0b7"} Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.779577 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.779557763 podStartE2EDuration="3.779557763s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:00.771936084 +0000 UTC m=+5090.392869581" watchObservedRunningTime="2025-09-30 20:56:00.779557763 +0000 UTC m=+5090.400491240" Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.793483 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.793462516 podStartE2EDuration="3.793462516s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:00.788752413 +0000 UTC m=+5090.409685910" watchObservedRunningTime="2025-09-30 20:56:00.793462516 +0000 UTC m=+5090.414395993" Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.823535 4756 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.82350973 podStartE2EDuration="3.82350973s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:00.8208598 +0000 UTC m=+5090.441793277" watchObservedRunningTime="2025-09-30 20:56:00.82350973 +0000 UTC m=+5090.444443207" Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.849602 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.849581129 podStartE2EDuration="3.849581129s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:00.842075634 +0000 UTC m=+5090.463009121" watchObservedRunningTime="2025-09-30 20:56:00.849581129 +0000 UTC m=+5090.470514606" Sep 30 20:56:00 crc kubenswrapper[4756]: I0930 20:56:00.867094 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.867069596 podStartE2EDuration="3.867069596s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:00.86225755 +0000 UTC m=+5090.483191037" watchObservedRunningTime="2025-09-30 20:56:00.867069596 +0000 UTC m=+5090.488003083" Sep 30 20:56:01 crc kubenswrapper[4756]: I0930 20:56:01.780612 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"9d1172a6-25c7-4141-a33a-0cb6e7532927","Type":"ContainerStarted","Data":"e58a2a1d67b6f4d3c8ac6c41d443a807749c857dd77ea774ae0dd72b96e03486"} Sep 30 20:56:01 crc kubenswrapper[4756]: I0930 20:56:01.780690 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"9d1172a6-25c7-4141-a33a-0cb6e7532927","Type":"ContainerStarted","Data":"9461e579e7d938bb33cbeff16a09b07363d6f48e07213c444c971efe357df6e2"} Sep 30 20:56:01 crc kubenswrapper[4756]: I0930 20:56:01.812634 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.812608238 podStartE2EDuration="4.812608238s" podCreationTimestamp="2025-09-30 20:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:01.806822037 +0000 UTC m=+5091.427755544" watchObservedRunningTime="2025-09-30 20:56:01.812608238 +0000 UTC m=+5091.433541755" Sep 30 20:56:01 crc kubenswrapper[4756]: I0930 20:56:01.983392 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.016659 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.029817 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.089720 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.222383 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/ovsdbserver-sb-0" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.233632 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.239435 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Sep 30 20:56:02 crc kubenswrapper[4756]: I0930 20:56:02.792690 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Sep 30 20:56:03 crc kubenswrapper[4756]: I0930 20:56:03.983430 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.016231 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.086945 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.223084 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.233284 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.238500 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.371371 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.372629 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.374858 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.409857 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.464684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.465007 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.465186 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj8gw\" (UniqueName: \"kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.465372 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.566790 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.566842 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.566891 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj8gw\" (UniqueName: \"kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.566956 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" 
Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.567944 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.567948 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.568013 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.589551 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj8gw\" (UniqueName: \"kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw\") pod \"dnsmasq-dns-547968cc8f-zhqzz\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:04 crc kubenswrapper[4756]: I0930 20:56:04.721586 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.027119 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.063654 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.071183 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.105569 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.171030 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.264346 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.277033 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.294810 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.311165 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.356108 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.656053 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 
30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.688127 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.690261 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.692376 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.697282 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.786788 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.786921 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.786951 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.786972 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.787241 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgpn9\" (UniqueName: \"kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.827143 4756 generic.go:334] "Generic (PLEG): container finished" podID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerID="a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010" exitCode=0 Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.827249 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" event={"ID":"ce1c92f3-408b-499c-be52-ce01ffa31044","Type":"ContainerDied","Data":"a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010"} Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.827598 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" 
event={"ID":"ce1c92f3-408b-499c-be52-ce01ffa31044","Type":"ContainerStarted","Data":"bdb2ebe9fc1c9d021b74f261325e298849d85b91823270fcabb097793aba5545"} Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.889314 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgpn9\" (UniqueName: \"kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.889563 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.889722 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.889807 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.889840 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.893700 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.894026 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.894076 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.894662 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.900072 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:05 crc kubenswrapper[4756]: I0930 20:56:05.912823 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgpn9\" (UniqueName: \"kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9\") pod \"dnsmasq-dns-5f5cc984d9-dfbmg\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.007604 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.250713 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.840762 4756 generic.go:334] "Generic (PLEG): container finished" podID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerID="6823f75ee12827c1ebdea68dde9926dffbc6ce1c55cde9784807805c890fe1a0" exitCode=0 Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.840813 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" event={"ID":"178b785b-98f8-495b-ae5e-7342ad1a94d6","Type":"ContainerDied","Data":"6823f75ee12827c1ebdea68dde9926dffbc6ce1c55cde9784807805c890fe1a0"} Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.840862 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" event={"ID":"178b785b-98f8-495b-ae5e-7342ad1a94d6","Type":"ContainerStarted","Data":"7122ea929217362fb35a826e4868056c8ecfe5023d52523cef8eb607fcdd8722"} Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.845151 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" event={"ID":"ce1c92f3-408b-499c-be52-ce01ffa31044","Type":"ContainerStarted","Data":"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23"} Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.845452 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:06 crc kubenswrapper[4756]: I0930 20:56:06.845447 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="dnsmasq-dns" containerID="cri-o://2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23" gracePeriod=10 Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.285280 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.420904 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config\") pod \"ce1c92f3-408b-499c-be52-ce01ffa31044\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.421078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb\") pod \"ce1c92f3-408b-499c-be52-ce01ffa31044\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.421121 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj8gw\" (UniqueName: \"kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw\") pod \"ce1c92f3-408b-499c-be52-ce01ffa31044\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.421149 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc\") pod \"ce1c92f3-408b-499c-be52-ce01ffa31044\" (UID: \"ce1c92f3-408b-499c-be52-ce01ffa31044\") " Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.425688 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw" (OuterVolumeSpecName: "kube-api-access-sj8gw") pod "ce1c92f3-408b-499c-be52-ce01ffa31044" (UID: "ce1c92f3-408b-499c-be52-ce01ffa31044"). InnerVolumeSpecName "kube-api-access-sj8gw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.461032 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ce1c92f3-408b-499c-be52-ce01ffa31044" (UID: "ce1c92f3-408b-499c-be52-ce01ffa31044"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.464031 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config" (OuterVolumeSpecName: "config") pod "ce1c92f3-408b-499c-be52-ce01ffa31044" (UID: "ce1c92f3-408b-499c-be52-ce01ffa31044"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.488777 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ce1c92f3-408b-499c-be52-ce01ffa31044" (UID: "ce1c92f3-408b-499c-be52-ce01ffa31044"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.523062 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.523096 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.523108 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj8gw\" (UniqueName: \"kubernetes.io/projected/ce1c92f3-408b-499c-be52-ce01ffa31044-kube-api-access-sj8gw\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.523118 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce1c92f3-408b-499c-be52-ce01ffa31044-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.856734 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" event={"ID":"178b785b-98f8-495b-ae5e-7342ad1a94d6","Type":"ContainerStarted","Data":"5309f5f49a000f7b3b6aee9e22518c19b8e1c46211a4e5192d41c1be6e0a773a"} Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.857303 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.859627 4756 generic.go:334] "Generic (PLEG): container finished" podID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerID="2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23" exitCode=0 Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.859907 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.859877 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" event={"ID":"ce1c92f3-408b-499c-be52-ce01ffa31044","Type":"ContainerDied","Data":"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23"} Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.860228 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547968cc8f-zhqzz" event={"ID":"ce1c92f3-408b-499c-be52-ce01ffa31044","Type":"ContainerDied","Data":"bdb2ebe9fc1c9d021b74f261325e298849d85b91823270fcabb097793aba5545"} Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.860265 4756 scope.go:117] "RemoveContainer" containerID="2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.886194 4756 scope.go:117] "RemoveContainer" containerID="a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.886175 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" podStartSLOduration=2.886145578 podStartE2EDuration="2.886145578s" podCreationTimestamp="2025-09-30 20:56:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:07.88083288 +0000 UTC m=+5097.501766447" watchObservedRunningTime="2025-09-30 20:56:07.886145578 +0000 UTC m=+5097.507079055" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.909973 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.916442 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-547968cc8f-zhqzz"] Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.927618 4756 scope.go:117] "RemoveContainer" containerID="2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23" Sep 30 20:56:07 crc kubenswrapper[4756]: E0930 20:56:07.928031 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23\": container with ID starting with 2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23 not found: ID does not exist" containerID="2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.928081 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23"} err="failed to get container status \"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23\": rpc error: code = NotFound desc = could not find container \"2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23\": container with ID starting with 2b63d3bbe9e91fd478e5c365ea8a9e35e6d1fcbc022e27e9ad159fe151bf9e23 not found: ID does not exist" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.928114 4756 scope.go:117] "RemoveContainer" containerID="a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010" Sep 30 20:56:07 crc kubenswrapper[4756]: E0930 20:56:07.928451 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010\": container with ID starting with a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010 not found: ID does not exist" containerID="a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010" Sep 30 20:56:07 crc kubenswrapper[4756]: I0930 20:56:07.928474 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010"} err="failed to get container status \"a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010\": rpc error: code = NotFound desc = could not find container \"a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010\": container with ID starting with a3ae7f861ff4890b53ec7e76269d493ca826b7d1e4087f78c66ec0c284e75010 not found: ID does not exist" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.219532 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Sep 30 20:56:08 crc kubenswrapper[4756]: E0930 20:56:08.220229 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="init" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.220386 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="init" Sep 30 20:56:08 crc kubenswrapper[4756]: E0930 20:56:08.220551 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="dnsmasq-dns" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.220658 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="dnsmasq-dns" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.221112 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" containerName="dnsmasq-dns" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.222100 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.226338 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.235731 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.338848 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.339005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p7t2\" (UniqueName: \"kubernetes.io/projected/4f059cb9-0352-4976-b189-a13a57de725e-kube-api-access-5p7t2\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.339216 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4f059cb9-0352-4976-b189-a13a57de725e-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.441812 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p7t2\" (UniqueName: \"kubernetes.io/projected/4f059cb9-0352-4976-b189-a13a57de725e-kube-api-access-5p7t2\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.441893 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4f059cb9-0352-4976-b189-a13a57de725e-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.441939 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.456950 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.457378 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4f059cb9-0352-4976-b189-a13a57de725e-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.457447 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a907a4d2f3bb3cdb2c4b61d569fa9ffc01e6a5d01f8ed8a53a57de6ba2bd5b2b/globalmount\"" pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.470948 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p7t2\" (UniqueName: \"kubernetes.io/projected/4f059cb9-0352-4976-b189-a13a57de725e-kube-api-access-5p7t2\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.499428 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70807260-c9e6-4b66-8e78-b5352431b0f1\") pod \"ovn-copy-data\" (UID: \"4f059cb9-0352-4976-b189-a13a57de725e\") " pod="openstack/ovn-copy-data" Sep 30 20:56:08 crc kubenswrapper[4756]: I0930 20:56:08.605169 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 20:56:09 crc kubenswrapper[4756]: I0930 20:56:09.116788 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:56:09 crc kubenswrapper[4756]: E0930 20:56:09.117516 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:56:09 crc kubenswrapper[4756]: I0930 20:56:09.132819 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce1c92f3-408b-499c-be52-ce01ffa31044" path="/var/lib/kubelet/pods/ce1c92f3-408b-499c-be52-ce01ffa31044/volumes" Sep 30 20:56:09 crc kubenswrapper[4756]: I0930 20:56:09.154828 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 20:56:09 crc kubenswrapper[4756]: I0930 20:56:09.880002 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4f059cb9-0352-4976-b189-a13a57de725e","Type":"ContainerStarted","Data":"058890cf9aead4c9f6571c11b298350df3dff0df33d27606150d20b2df294127"} Sep 30 20:56:09 crc kubenswrapper[4756]: I0930 20:56:09.880326 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4f059cb9-0352-4976-b189-a13a57de725e","Type":"ContainerStarted","Data":"08660ecd6a4100913b02967d20772792a12a0ca30c3eb4bd2f38f2c8e61bffa3"} Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.533613 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=7.533592008 podStartE2EDuration="7.533592008s" podCreationTimestamp="2025-09-30 20:56:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:09.894429668 +0000 UTC m=+5099.515363155" watchObservedRunningTime="2025-09-30 20:56:14.533592008 +0000 UTC m=+5104.154525495" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.544556 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.546194 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.550627 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-lchsr" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.550657 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.550661 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.564275 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.670210 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v7bf\" (UniqueName: \"kubernetes.io/projected/e184d685-977f-412b-b88f-1aec1b3c53d3-kube-api-access-5v7bf\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.670263 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e184d685-977f-412b-b88f-1aec1b3c53d3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.670283 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e184d685-977f-412b-b88f-1aec1b3c53d3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.670312 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-config\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.670418 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-scripts\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.771988 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v7bf\" (UniqueName: \"kubernetes.io/projected/e184d685-977f-412b-b88f-1aec1b3c53d3-kube-api-access-5v7bf\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.772051 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e184d685-977f-412b-b88f-1aec1b3c53d3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.772078 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e184d685-977f-412b-b88f-1aec1b3c53d3-ovn-rundir\") 
pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.772120 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-config\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.772171 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-scripts\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.773314 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-scripts\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.773517 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e184d685-977f-412b-b88f-1aec1b3c53d3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.773857 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e184d685-977f-412b-b88f-1aec1b3c53d3-config\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.779067 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e184d685-977f-412b-b88f-1aec1b3c53d3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.792187 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v7bf\" (UniqueName: \"kubernetes.io/projected/e184d685-977f-412b-b88f-1aec1b3c53d3-kube-api-access-5v7bf\") pod \"ovn-northd-0\" (UID: \"e184d685-977f-412b-b88f-1aec1b3c53d3\") " pod="openstack/ovn-northd-0" Sep 30 20:56:14 crc kubenswrapper[4756]: I0930 20:56:14.875361 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.323983 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:56:15 crc kubenswrapper[4756]: W0930 20:56:15.334808 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode184d685_977f_412b_b88f_1aec1b3c53d3.slice/crio-62a81b2e1da10052b5c7addaf98d9fa2a83ed2c367096b680cdfdbbcd072c4a8 WatchSource:0}: Error finding container 62a81b2e1da10052b5c7addaf98d9fa2a83ed2c367096b680cdfdbbcd072c4a8: Status 404 returned error can't find the container with id 62a81b2e1da10052b5c7addaf98d9fa2a83ed2c367096b680cdfdbbcd072c4a8 Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.927016 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e184d685-977f-412b-b88f-1aec1b3c53d3","Type":"ContainerStarted","Data":"16ae056ed411f55a2fc5a1df08c4683576c5b43fd257bab6011c124a29c657a7"} Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.927381 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e184d685-977f-412b-b88f-1aec1b3c53d3","Type":"ContainerStarted","Data":"6f1b5b38fde4db03669ca899d0598066b78e696c7bfec0a0b5725f98723d255b"} Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.927410 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e184d685-977f-412b-b88f-1aec1b3c53d3","Type":"ContainerStarted","Data":"62a81b2e1da10052b5c7addaf98d9fa2a83ed2c367096b680cdfdbbcd072c4a8"} Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.927428 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 20:56:15 crc kubenswrapper[4756]: I0930 20:56:15.948265 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.948243306 podStartE2EDuration="1.948243306s" podCreationTimestamp="2025-09-30 20:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:15.944981361 +0000 UTC m=+5105.565914848" watchObservedRunningTime="2025-09-30 20:56:15.948243306 +0000 UTC m=+5105.569176783" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.009557 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.059575 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.059782 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="dnsmasq-dns" containerID="cri-o://4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1" gracePeriod=10 Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.568628 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.705732 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config\") pod \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.705853 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc\") pod \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.705910 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp6lt\" (UniqueName: \"kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt\") pod \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\" (UID: \"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41\") " Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.711495 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt" (OuterVolumeSpecName: "kube-api-access-fp6lt") pod "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" (UID: "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41"). InnerVolumeSpecName "kube-api-access-fp6lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.749825 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config" (OuterVolumeSpecName: "config") pod "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" (UID: "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.750438 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" (UID: "5993c2dd-f61e-4bdb-ade9-578cfcc6ab41"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.808917 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.808959 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp6lt\" (UniqueName: \"kubernetes.io/projected/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-kube-api-access-fp6lt\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.808973 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.938349 4756 generic.go:334] "Generic (PLEG): container finished" podID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerID="4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1" exitCode=0 Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.938438 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.939019 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" event={"ID":"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41","Type":"ContainerDied","Data":"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1"} Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.939104 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-psch6" event={"ID":"5993c2dd-f61e-4bdb-ade9-578cfcc6ab41","Type":"ContainerDied","Data":"9f76215580ad3be220010ae0b113909dcb358393f223421db8d4d3e77391a58e"} Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.939172 4756 scope.go:117] "RemoveContainer" containerID="4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.967348 4756 scope.go:117] "RemoveContainer" containerID="3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.974266 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.982810 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-psch6"] Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.996548 4756 scope.go:117] "RemoveContainer" containerID="4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1" Sep 30 20:56:16 crc kubenswrapper[4756]: E0930 20:56:16.996912 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1\": container with ID starting with 4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1 not found: ID does not exist" containerID="4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.996979 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1"} err="failed to get container status 
\"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1\": rpc error: code = NotFound desc = could not find container \"4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1\": container with ID starting with 4af506dc1153474ea15af59464c7c04d60455898b66c30feaaa231b97465b2e1 not found: ID does not exist" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.997017 4756 scope.go:117] "RemoveContainer" containerID="3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f" Sep 30 20:56:16 crc kubenswrapper[4756]: E0930 20:56:16.999047 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f\": container with ID starting with 3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f not found: ID does not exist" containerID="3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f" Sep 30 20:56:16 crc kubenswrapper[4756]: I0930 20:56:16.999083 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f"} err="failed to get container status \"3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f\": rpc error: code = NotFound desc = could not find container \"3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f\": container with ID starting with 3d1cbce16efe229574769a3bb9b38eea231dcc69f91ea60f1370dcc12957403f not found: ID does not exist" Sep 30 20:56:17 crc kubenswrapper[4756]: I0930 20:56:17.125367 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" path="/var/lib/kubelet/pods/5993c2dd-f61e-4bdb-ade9-578cfcc6ab41/volumes" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.555647 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-2h268"] Sep 30 20:56:19 crc kubenswrapper[4756]: E0930 20:56:19.556371 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="init" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.556382 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="init" Sep 30 20:56:19 crc kubenswrapper[4756]: E0930 20:56:19.556479 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="dnsmasq-dns" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.556487 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="dnsmasq-dns" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.556651 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="5993c2dd-f61e-4bdb-ade9-578cfcc6ab41" containerName="dnsmasq-dns" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.557148 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2h268" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.569616 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2h268"] Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.669959 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rfrp\" (UniqueName: \"kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp\") pod \"keystone-db-create-2h268\" (UID: \"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7\") " pod="openstack/keystone-db-create-2h268" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.771856 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rfrp\" (UniqueName: \"kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp\") pod \"keystone-db-create-2h268\" (UID: \"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7\") " pod="openstack/keystone-db-create-2h268" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.793982 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rfrp\" (UniqueName: \"kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp\") pod \"keystone-db-create-2h268\" (UID: \"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7\") " pod="openstack/keystone-db-create-2h268" Sep 30 20:56:19 crc kubenswrapper[4756]: I0930 20:56:19.910962 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2h268" Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.162730 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2h268"] Sep 30 20:56:20 crc kubenswrapper[4756]: W0930 20:56:20.168035 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1de8243f_1534_4d86_a9f8_2f2df9ba6ba7.slice/crio-3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611 WatchSource:0}: Error finding container 3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611: Status 404 returned error can't find the container with id 3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611 Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.317538 4756 scope.go:117] "RemoveContainer" containerID="7d55d5d800055e314497a1429e35586141107f9ce684ffac18f807ffa7de1231" Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.437820 4756 scope.go:117] "RemoveContainer" containerID="4bc23a744b0c480961daa73712841719b2dc2de699734261c363aa5f8b70579c" Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.466944 4756 scope.go:117] "RemoveContainer" containerID="a6c14311921e95a815d84260255dc79b59fec6980fea05047fd971e4b564f77c" Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.973540 4756 generic.go:334] "Generic (PLEG): container finished" podID="1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" containerID="1e2397e4928273b5a7b200819279cdc0b3a5136f36fd569c13144d1da47cb6ae" exitCode=0 Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.973592 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2h268" event={"ID":"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7","Type":"ContainerDied","Data":"1e2397e4928273b5a7b200819279cdc0b3a5136f36fd569c13144d1da47cb6ae"} Sep 30 20:56:20 crc kubenswrapper[4756]: I0930 20:56:20.973747 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-create-2h268" event={"ID":"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7","Type":"ContainerStarted","Data":"3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611"} Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.116180 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:56:22 crc kubenswrapper[4756]: E0930 20:56:22.116427 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.283027 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2h268" Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.413760 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rfrp\" (UniqueName: \"kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp\") pod \"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7\" (UID: \"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7\") " Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.419615 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp" (OuterVolumeSpecName: "kube-api-access-4rfrp") pod "1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" (UID: "1de8243f-1534-4d86-a9f8-2f2df9ba6ba7"). InnerVolumeSpecName "kube-api-access-4rfrp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.515679 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rfrp\" (UniqueName: \"kubernetes.io/projected/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7-kube-api-access-4rfrp\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.994453 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2h268" event={"ID":"1de8243f-1534-4d86-a9f8-2f2df9ba6ba7","Type":"ContainerDied","Data":"3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611"} Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.994515 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3049a64a0805c63f09a71ba15fe87118cd04dd062f64cb550504d9a4a4334611" Sep 30 20:56:22 crc kubenswrapper[4756]: I0930 20:56:22.994540 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2h268" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.682881 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9b8d-account-create-pvdf6"] Sep 30 20:56:29 crc kubenswrapper[4756]: E0930 20:56:29.684017 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" containerName="mariadb-database-create" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.684038 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" containerName="mariadb-database-create" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.684324 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" containerName="mariadb-database-create" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.685169 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.689591 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.705596 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b8d-account-create-pvdf6"] Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.745255 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ntk5\" (UniqueName: \"kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5\") pod \"keystone-9b8d-account-create-pvdf6\" (UID: \"f46f27e6-1550-4485-b876-caa797674f2c\") " pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.847994 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ntk5\" (UniqueName: \"kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5\") pod \"keystone-9b8d-account-create-pvdf6\" (UID: \"f46f27e6-1550-4485-b876-caa797674f2c\") " pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.879540 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ntk5\" (UniqueName: \"kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5\") pod \"keystone-9b8d-account-create-pvdf6\" (UID: \"f46f27e6-1550-4485-b876-caa797674f2c\") " pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:29 crc kubenswrapper[4756]: I0930 20:56:29.936075 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 20:56:30 crc kubenswrapper[4756]: I0930 20:56:30.003930 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:30 crc kubenswrapper[4756]: I0930 20:56:30.524450 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b8d-account-create-pvdf6"] Sep 30 20:56:30 crc kubenswrapper[4756]: W0930 20:56:30.529727 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf46f27e6_1550_4485_b876_caa797674f2c.slice/crio-33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b WatchSource:0}: Error finding container 33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b: Status 404 returned error can't find the container with id 33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b Sep 30 20:56:31 crc kubenswrapper[4756]: I0930 20:56:31.082760 4756 generic.go:334] "Generic (PLEG): container finished" podID="f46f27e6-1550-4485-b876-caa797674f2c" containerID="8abdcf3a96ef10f873ae33c102f9ccc7fef5646367a55c5d9a830bb0eeacbb87" exitCode=0 Sep 30 20:56:31 crc kubenswrapper[4756]: I0930 20:56:31.082816 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b8d-account-create-pvdf6" event={"ID":"f46f27e6-1550-4485-b876-caa797674f2c","Type":"ContainerDied","Data":"8abdcf3a96ef10f873ae33c102f9ccc7fef5646367a55c5d9a830bb0eeacbb87"} Sep 30 20:56:31 crc kubenswrapper[4756]: I0930 20:56:31.082848 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b8d-account-create-pvdf6" event={"ID":"f46f27e6-1550-4485-b876-caa797674f2c","Type":"ContainerStarted","Data":"33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b"} Sep 30 20:56:32 crc kubenswrapper[4756]: I0930 20:56:32.450252 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:32 crc kubenswrapper[4756]: I0930 20:56:32.488907 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ntk5\" (UniqueName: \"kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5\") pod \"f46f27e6-1550-4485-b876-caa797674f2c\" (UID: \"f46f27e6-1550-4485-b876-caa797674f2c\") " Sep 30 20:56:32 crc kubenswrapper[4756]: I0930 20:56:32.499720 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5" (OuterVolumeSpecName: "kube-api-access-2ntk5") pod "f46f27e6-1550-4485-b876-caa797674f2c" (UID: "f46f27e6-1550-4485-b876-caa797674f2c"). InnerVolumeSpecName "kube-api-access-2ntk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:32 crc kubenswrapper[4756]: I0930 20:56:32.590480 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ntk5\" (UniqueName: \"kubernetes.io/projected/f46f27e6-1550-4485-b876-caa797674f2c-kube-api-access-2ntk5\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:33 crc kubenswrapper[4756]: I0930 20:56:33.099867 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b8d-account-create-pvdf6" event={"ID":"f46f27e6-1550-4485-b876-caa797674f2c","Type":"ContainerDied","Data":"33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b"} Sep 30 20:56:33 crc kubenswrapper[4756]: I0930 20:56:33.100120 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33c42350acf110dd1d11cf69c2b256894a7c974d150586490b055ed48161049b" Sep 30 20:56:33 crc kubenswrapper[4756]: I0930 20:56:33.099928 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b8d-account-create-pvdf6" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.226854 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gkpxw"] Sep 30 20:56:35 crc kubenswrapper[4756]: E0930 20:56:35.227542 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f46f27e6-1550-4485-b876-caa797674f2c" containerName="mariadb-account-create" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.227561 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f46f27e6-1550-4485-b876-caa797674f2c" containerName="mariadb-account-create" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.227747 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f46f27e6-1550-4485-b876-caa797674f2c" containerName="mariadb-account-create" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.229062 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.232807 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.232937 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fm7pq" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.233041 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.233187 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.250466 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gkpxw"] Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.332071 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.332226 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qft2k\" (UniqueName: \"kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.332303 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.433511 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qft2k\" (UniqueName: \"kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.433628 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.433669 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.439770 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " 
pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.445110 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.454440 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qft2k\" (UniqueName: \"kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k\") pod \"keystone-db-sync-gkpxw\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.566557 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:35 crc kubenswrapper[4756]: I0930 20:56:35.985907 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gkpxw"] Sep 30 20:56:36 crc kubenswrapper[4756]: I0930 20:56:36.116449 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:56:36 crc kubenswrapper[4756]: E0930 20:56:36.117592 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:56:36 crc kubenswrapper[4756]: I0930 20:56:36.125597 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkpxw" event={"ID":"8ac64df1-a909-4013-981b-23bfc2cf0e4e","Type":"ContainerStarted","Data":"b862781acb30a4ffdc1699f1fb6094336d0d0d4a2e8e311279828f9b9e91050f"} Sep 30 20:56:37 crc kubenswrapper[4756]: I0930 20:56:37.138364 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkpxw" event={"ID":"8ac64df1-a909-4013-981b-23bfc2cf0e4e","Type":"ContainerStarted","Data":"9afa79264565db561d923f42d248b7598ee531b2e203d481d6f80ed0fd35d191"} Sep 30 20:56:37 crc kubenswrapper[4756]: I0930 20:56:37.170429 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gkpxw" podStartSLOduration=2.170388333 podStartE2EDuration="2.170388333s" podCreationTimestamp="2025-09-30 20:56:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:37.16453681 +0000 UTC m=+5126.785470337" watchObservedRunningTime="2025-09-30 20:56:37.170388333 +0000 UTC m=+5126.791321820" Sep 30 20:56:38 crc kubenswrapper[4756]: I0930 20:56:38.150180 4756 generic.go:334] "Generic (PLEG): container finished" podID="8ac64df1-a909-4013-981b-23bfc2cf0e4e" containerID="9afa79264565db561d923f42d248b7598ee531b2e203d481d6f80ed0fd35d191" exitCode=0 Sep 30 20:56:38 crc kubenswrapper[4756]: I0930 20:56:38.150239 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkpxw" event={"ID":"8ac64df1-a909-4013-981b-23bfc2cf0e4e","Type":"ContainerDied","Data":"9afa79264565db561d923f42d248b7598ee531b2e203d481d6f80ed0fd35d191"} Sep 30 20:56:39 crc 
kubenswrapper[4756]: I0930 20:56:39.509247 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.610871 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qft2k\" (UniqueName: \"kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k\") pod \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.610994 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data\") pod \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.611269 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle\") pod \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\" (UID: \"8ac64df1-a909-4013-981b-23bfc2cf0e4e\") " Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.619887 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k" (OuterVolumeSpecName: "kube-api-access-qft2k") pod "8ac64df1-a909-4013-981b-23bfc2cf0e4e" (UID: "8ac64df1-a909-4013-981b-23bfc2cf0e4e"). InnerVolumeSpecName "kube-api-access-qft2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.642226 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ac64df1-a909-4013-981b-23bfc2cf0e4e" (UID: "8ac64df1-a909-4013-981b-23bfc2cf0e4e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.660330 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data" (OuterVolumeSpecName: "config-data") pod "8ac64df1-a909-4013-981b-23bfc2cf0e4e" (UID: "8ac64df1-a909-4013-981b-23bfc2cf0e4e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.714895 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.714970 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qft2k\" (UniqueName: \"kubernetes.io/projected/8ac64df1-a909-4013-981b-23bfc2cf0e4e-kube-api-access-qft2k\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:39 crc kubenswrapper[4756]: I0930 20:56:39.714987 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ac64df1-a909-4013-981b-23bfc2cf0e4e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.174584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkpxw" event={"ID":"8ac64df1-a909-4013-981b-23bfc2cf0e4e","Type":"ContainerDied","Data":"b862781acb30a4ffdc1699f1fb6094336d0d0d4a2e8e311279828f9b9e91050f"} Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.174640 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b862781acb30a4ffdc1699f1fb6094336d0d0d4a2e8e311279828f9b9e91050f" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.175347 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gkpxw" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.465074 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-9km4q"] Sep 30 20:56:40 crc kubenswrapper[4756]: E0930 20:56:40.465590 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ac64df1-a909-4013-981b-23bfc2cf0e4e" containerName="keystone-db-sync" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.465617 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac64df1-a909-4013-981b-23bfc2cf0e4e" containerName="keystone-db-sync" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.465819 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac64df1-a909-4013-981b-23bfc2cf0e4e" containerName="keystone-db-sync" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.466639 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.470125 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.470209 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.470427 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.470871 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fm7pq" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.477590 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.479355 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.492271 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9km4q"] Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.505916 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.554727 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555053 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555093 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt8dd\" (UniqueName: \"kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555119 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d4kl\" (UniqueName: \"kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555168 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555181 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555211 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555236 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " 
pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555253 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555274 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.555296 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656417 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656480 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656517 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656574 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656616 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt8dd\" (UniqueName: \"kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656636 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d4kl\" (UniqueName: \"kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " 
pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656678 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656694 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656721 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656748 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.656769 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.657332 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.657478 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.657604 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.658165 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.660500 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.662006 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.662386 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.663151 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.669740 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.678240 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d4kl\" (UniqueName: \"kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl\") pod \"dnsmasq-dns-7b596d9679-bmcnx\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.678887 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt8dd\" (UniqueName: \"kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd\") pod \"keystone-bootstrap-9km4q\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.784028 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:40 crc kubenswrapper[4756]: I0930 20:56:40.793700 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:41 crc kubenswrapper[4756]: I0930 20:56:41.295066 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:56:41 crc kubenswrapper[4756]: I0930 20:56:41.364552 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9km4q"] Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.195164 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9km4q" event={"ID":"cf2d1731-99e3-4949-8396-a49bd436c7bc","Type":"ContainerStarted","Data":"d1c3222dedb34463fafc99c69f7ce1e3e59c38d7e972c4c1d34eefe7278b9632"} Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.195251 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9km4q" event={"ID":"cf2d1731-99e3-4949-8396-a49bd436c7bc","Type":"ContainerStarted","Data":"a9dda7c149c6500b053dff38883ad7d75751ecbccf7a52b93458810eb9cd0510"} Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.199379 4756 generic.go:334] "Generic (PLEG): container finished" podID="71236825-df25-4a45-803f-4fb5ea260419" containerID="54cab6884d3b690aa37bf60644a4e11bcfb78b3348a3ae2d32e9913d10dc8865" exitCode=0 Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.199430 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" event={"ID":"71236825-df25-4a45-803f-4fb5ea260419","Type":"ContainerDied","Data":"54cab6884d3b690aa37bf60644a4e11bcfb78b3348a3ae2d32e9913d10dc8865"} Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.199452 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" event={"ID":"71236825-df25-4a45-803f-4fb5ea260419","Type":"ContainerStarted","Data":"809cd950c6da0d943945c278e1d7137d9fef0e0ac34f556f88c2a95e919e3b2a"} Sep 30 20:56:42 crc kubenswrapper[4756]: I0930 20:56:42.221144 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9km4q" podStartSLOduration=2.221130087 podStartE2EDuration="2.221130087s" podCreationTimestamp="2025-09-30 20:56:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:42.217225575 +0000 UTC m=+5131.838159062" watchObservedRunningTime="2025-09-30 20:56:42.221130087 +0000 UTC m=+5131.842063564" Sep 30 20:56:43 crc kubenswrapper[4756]: I0930 20:56:43.210713 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" event={"ID":"71236825-df25-4a45-803f-4fb5ea260419","Type":"ContainerStarted","Data":"eec329a6c55b6287d8ff03be11ca35a5a4e6f341df066cc8d52a09347c0f4c6f"} Sep 30 20:56:43 crc kubenswrapper[4756]: I0930 20:56:43.211659 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:43 crc kubenswrapper[4756]: I0930 20:56:43.248931 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" podStartSLOduration=3.248908354 podStartE2EDuration="3.248908354s" podCreationTimestamp="2025-09-30 20:56:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:43.2387742 +0000 UTC m=+5132.859707707" watchObservedRunningTime="2025-09-30 20:56:43.248908354 +0000 UTC m=+5132.869841851" Sep 30 20:56:45 crc 
kubenswrapper[4756]: I0930 20:56:45.232987 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf2d1731-99e3-4949-8396-a49bd436c7bc" containerID="d1c3222dedb34463fafc99c69f7ce1e3e59c38d7e972c4c1d34eefe7278b9632" exitCode=0 Sep 30 20:56:45 crc kubenswrapper[4756]: I0930 20:56:45.233149 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9km4q" event={"ID":"cf2d1731-99e3-4949-8396-a49bd436c7bc","Type":"ContainerDied","Data":"d1c3222dedb34463fafc99c69f7ce1e3e59c38d7e972c4c1d34eefe7278b9632"} Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.546867 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.676900 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt8dd\" (UniqueName: \"kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.676973 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.676991 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.677028 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.677061 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.677166 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys\") pod \"cf2d1731-99e3-4949-8396-a49bd436c7bc\" (UID: \"cf2d1731-99e3-4949-8396-a49bd436c7bc\") " Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.682181 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.682226 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts" (OuterVolumeSpecName: "scripts") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.682255 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd" (OuterVolumeSpecName: "kube-api-access-nt8dd") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "kube-api-access-nt8dd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.683534 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.699867 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data" (OuterVolumeSpecName: "config-data") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.700202 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf2d1731-99e3-4949-8396-a49bd436c7bc" (UID: "cf2d1731-99e3-4949-8396-a49bd436c7bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780737 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt8dd\" (UniqueName: \"kubernetes.io/projected/cf2d1731-99e3-4949-8396-a49bd436c7bc-kube-api-access-nt8dd\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780826 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780895 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780911 4756 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780924 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:46 crc kubenswrapper[4756]: I0930 20:56:46.780934 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf2d1731-99e3-4949-8396-a49bd436c7bc-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.254436 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9km4q" event={"ID":"cf2d1731-99e3-4949-8396-a49bd436c7bc","Type":"ContainerDied","Data":"a9dda7c149c6500b053dff38883ad7d75751ecbccf7a52b93458810eb9cd0510"} Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.254487 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9dda7c149c6500b053dff38883ad7d75751ecbccf7a52b93458810eb9cd0510" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.254839 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9km4q" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.337557 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9km4q"] Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.343760 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9km4q"] Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.428063 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-4zpvm"] Sep 30 20:56:47 crc kubenswrapper[4756]: E0930 20:56:47.428384 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf2d1731-99e3-4949-8396-a49bd436c7bc" containerName="keystone-bootstrap" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.428424 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf2d1731-99e3-4949-8396-a49bd436c7bc" containerName="keystone-bootstrap" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.428582 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf2d1731-99e3-4949-8396-a49bd436c7bc" containerName="keystone-bootstrap" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.430594 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.431991 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.435223 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.435437 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.435794 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fm7pq" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.437311 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4zpvm"] Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.595689 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.595770 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.595808 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.595872 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.595907 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsnw6\" (UniqueName: \"kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.596123 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.697591 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.697717 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.697796 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.697848 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.697964 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.698022 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsnw6\" (UniqueName: \"kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.702589 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle\") pod 
\"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.703058 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.706317 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.708713 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.709181 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.729557 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsnw6\" (UniqueName: \"kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6\") pod \"keystone-bootstrap-4zpvm\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:47 crc kubenswrapper[4756]: I0930 20:56:47.759887 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:48 crc kubenswrapper[4756]: I0930 20:56:48.210258 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4zpvm"] Sep 30 20:56:48 crc kubenswrapper[4756]: I0930 20:56:48.262912 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4zpvm" event={"ID":"894ad8d4-57b4-4822-9ef7-f69bcd1222df","Type":"ContainerStarted","Data":"26ccba33c6cf8b8fe3dbecb40fab5611538cec04faba88cbe81bb5a5da6c31f8"} Sep 30 20:56:49 crc kubenswrapper[4756]: I0930 20:56:49.136539 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf2d1731-99e3-4949-8396-a49bd436c7bc" path="/var/lib/kubelet/pods/cf2d1731-99e3-4949-8396-a49bd436c7bc/volumes" Sep 30 20:56:49 crc kubenswrapper[4756]: I0930 20:56:49.274031 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4zpvm" event={"ID":"894ad8d4-57b4-4822-9ef7-f69bcd1222df","Type":"ContainerStarted","Data":"f9c953bd4a1be94c439c1e1296bf492ec49b81a59f65827c62a85d2a1dc8b6c8"} Sep 30 20:56:49 crc kubenswrapper[4756]: I0930 20:56:49.299497 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-4zpvm" podStartSLOduration=2.299482126 podStartE2EDuration="2.299482126s" podCreationTimestamp="2025-09-30 20:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:49.295182124 +0000 UTC m=+5138.916115601" watchObservedRunningTime="2025-09-30 20:56:49.299482126 +0000 UTC m=+5138.920415603" Sep 30 20:56:50 crc kubenswrapper[4756]: I0930 20:56:50.116501 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:56:50 crc kubenswrapper[4756]: E0930 20:56:50.117914 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:56:50 crc kubenswrapper[4756]: I0930 20:56:50.795656 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:56:50 crc kubenswrapper[4756]: I0930 20:56:50.896140 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:50 crc kubenswrapper[4756]: I0930 20:56:50.896491 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="dnsmasq-dns" containerID="cri-o://5309f5f49a000f7b3b6aee9e22518c19b8e1c46211a4e5192d41c1be6e0a773a" gracePeriod=10 Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.009410 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.13:5353: connect: connection refused" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.293859 4756 generic.go:334] "Generic (PLEG): container finished" podID="178b785b-98f8-495b-ae5e-7342ad1a94d6" 
containerID="5309f5f49a000f7b3b6aee9e22518c19b8e1c46211a4e5192d41c1be6e0a773a" exitCode=0 Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.293935 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" event={"ID":"178b785b-98f8-495b-ae5e-7342ad1a94d6","Type":"ContainerDied","Data":"5309f5f49a000f7b3b6aee9e22518c19b8e1c46211a4e5192d41c1be6e0a773a"} Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.296094 4756 generic.go:334] "Generic (PLEG): container finished" podID="894ad8d4-57b4-4822-9ef7-f69bcd1222df" containerID="f9c953bd4a1be94c439c1e1296bf492ec49b81a59f65827c62a85d2a1dc8b6c8" exitCode=0 Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.296150 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4zpvm" event={"ID":"894ad8d4-57b4-4822-9ef7-f69bcd1222df","Type":"ContainerDied","Data":"f9c953bd4a1be94c439c1e1296bf492ec49b81a59f65827c62a85d2a1dc8b6c8"} Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.371362 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.470424 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb\") pod \"178b785b-98f8-495b-ae5e-7342ad1a94d6\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.470538 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config\") pod \"178b785b-98f8-495b-ae5e-7342ad1a94d6\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.470591 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb\") pod \"178b785b-98f8-495b-ae5e-7342ad1a94d6\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.470638 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgpn9\" (UniqueName: \"kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9\") pod \"178b785b-98f8-495b-ae5e-7342ad1a94d6\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.470661 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc\") pod \"178b785b-98f8-495b-ae5e-7342ad1a94d6\" (UID: \"178b785b-98f8-495b-ae5e-7342ad1a94d6\") " Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.482915 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9" (OuterVolumeSpecName: "kube-api-access-zgpn9") pod "178b785b-98f8-495b-ae5e-7342ad1a94d6" (UID: "178b785b-98f8-495b-ae5e-7342ad1a94d6"). InnerVolumeSpecName "kube-api-access-zgpn9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.509536 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "178b785b-98f8-495b-ae5e-7342ad1a94d6" (UID: "178b785b-98f8-495b-ae5e-7342ad1a94d6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.510531 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config" (OuterVolumeSpecName: "config") pod "178b785b-98f8-495b-ae5e-7342ad1a94d6" (UID: "178b785b-98f8-495b-ae5e-7342ad1a94d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.510900 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "178b785b-98f8-495b-ae5e-7342ad1a94d6" (UID: "178b785b-98f8-495b-ae5e-7342ad1a94d6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.516615 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "178b785b-98f8-495b-ae5e-7342ad1a94d6" (UID: "178b785b-98f8-495b-ae5e-7342ad1a94d6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.572572 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.572604 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.572613 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.572624 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgpn9\" (UniqueName: \"kubernetes.io/projected/178b785b-98f8-495b-ae5e-7342ad1a94d6-kube-api-access-zgpn9\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:51 crc kubenswrapper[4756]: I0930 20:56:51.572632 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/178b785b-98f8-495b-ae5e-7342ad1a94d6-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.310022 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" event={"ID":"178b785b-98f8-495b-ae5e-7342ad1a94d6","Type":"ContainerDied","Data":"7122ea929217362fb35a826e4868056c8ecfe5023d52523cef8eb607fcdd8722"} Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.310136 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f5cc984d9-dfbmg" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.310641 4756 scope.go:117] "RemoveContainer" containerID="5309f5f49a000f7b3b6aee9e22518c19b8e1c46211a4e5192d41c1be6e0a773a" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.361665 4756 scope.go:117] "RemoveContainer" containerID="6823f75ee12827c1ebdea68dde9926dffbc6ce1c55cde9784807805c890fe1a0" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.371972 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.379622 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f5cc984d9-dfbmg"] Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.648665 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798343 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798460 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsnw6\" (UniqueName: \"kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798508 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798564 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.798786 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys\") pod \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\" (UID: \"894ad8d4-57b4-4822-9ef7-f69bcd1222df\") " Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.803663 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.804080 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6" (OuterVolumeSpecName: "kube-api-access-wsnw6") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "kube-api-access-wsnw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.805592 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.806302 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts" (OuterVolumeSpecName: "scripts") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.826900 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data" (OuterVolumeSpecName: "config-data") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.827697 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "894ad8d4-57b4-4822-9ef7-f69bcd1222df" (UID: "894ad8d4-57b4-4822-9ef7-f69bcd1222df"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902017 4756 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902283 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902295 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsnw6\" (UniqueName: \"kubernetes.io/projected/894ad8d4-57b4-4822-9ef7-f69bcd1222df-kube-api-access-wsnw6\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902303 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902313 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:52 crc kubenswrapper[4756]: I0930 20:56:52.902323 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894ad8d4-57b4-4822-9ef7-f69bcd1222df-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.125940 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" path="/var/lib/kubelet/pods/178b785b-98f8-495b-ae5e-7342ad1a94d6/volumes" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.321042 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4zpvm" event={"ID":"894ad8d4-57b4-4822-9ef7-f69bcd1222df","Type":"ContainerDied","Data":"26ccba33c6cf8b8fe3dbecb40fab5611538cec04faba88cbe81bb5a5da6c31f8"} Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.321077 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-4zpvm" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.321100 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26ccba33c6cf8b8fe3dbecb40fab5611538cec04faba88cbe81bb5a5da6c31f8" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.422366 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-79dfdbf96-s7xnv"] Sep 30 20:56:53 crc kubenswrapper[4756]: E0930 20:56:53.422757 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="init" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.422776 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="init" Sep 30 20:56:53 crc kubenswrapper[4756]: E0930 20:56:53.422794 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="dnsmasq-dns" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.422804 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="dnsmasq-dns" Sep 30 20:56:53 crc kubenswrapper[4756]: E0930 20:56:53.422831 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894ad8d4-57b4-4822-9ef7-f69bcd1222df" containerName="keystone-bootstrap" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.422839 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="894ad8d4-57b4-4822-9ef7-f69bcd1222df" containerName="keystone-bootstrap" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.425101 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="178b785b-98f8-495b-ae5e-7342ad1a94d6" containerName="dnsmasq-dns" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.425140 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="894ad8d4-57b4-4822-9ef7-f69bcd1222df" containerName="keystone-bootstrap" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.426886 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.430907 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.431134 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fm7pq" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.431076 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.434022 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.440519 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79dfdbf96-s7xnv"] Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614072 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-config-data\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614174 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-combined-ca-bundle\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614258 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-fernet-keys\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614321 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-credential-keys\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614457 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-scripts\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.614494 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-kube-api-access-tm7jz\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716032 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-credential-keys\") pod \"keystone-79dfdbf96-s7xnv\" 
(UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716131 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-scripts\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716163 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-kube-api-access-tm7jz\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716206 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-config-data\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716254 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-combined-ca-bundle\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.716289 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-fernet-keys\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.720473 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-fernet-keys\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.723868 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-scripts\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.723899 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-combined-ca-bundle\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.724069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-config-data\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.724706 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-credential-keys\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.731741 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/f65b915b-51d9-4a4f-aa56-aba336e5f7d4-kube-api-access-tm7jz\") pod \"keystone-79dfdbf96-s7xnv\" (UID: \"f65b915b-51d9-4a4f-aa56-aba336e5f7d4\") " pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:53 crc kubenswrapper[4756]: I0930 20:56:53.760975 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:54 crc kubenswrapper[4756]: I0930 20:56:54.269853 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79dfdbf96-s7xnv"] Sep 30 20:56:54 crc kubenswrapper[4756]: W0930 20:56:54.271166 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf65b915b_51d9_4a4f_aa56_aba336e5f7d4.slice/crio-ba408820e147a9abe6bab45bb6e4eae3a9db9c45e38287569f723085ba54a94d WatchSource:0}: Error finding container ba408820e147a9abe6bab45bb6e4eae3a9db9c45e38287569f723085ba54a94d: Status 404 returned error can't find the container with id ba408820e147a9abe6bab45bb6e4eae3a9db9c45e38287569f723085ba54a94d Sep 30 20:56:54 crc kubenswrapper[4756]: I0930 20:56:54.338309 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79dfdbf96-s7xnv" event={"ID":"f65b915b-51d9-4a4f-aa56-aba336e5f7d4","Type":"ContainerStarted","Data":"ba408820e147a9abe6bab45bb6e4eae3a9db9c45e38287569f723085ba54a94d"} Sep 30 20:56:55 crc kubenswrapper[4756]: I0930 20:56:55.348945 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79dfdbf96-s7xnv" event={"ID":"f65b915b-51d9-4a4f-aa56-aba336e5f7d4","Type":"ContainerStarted","Data":"2148a36d363656a71aceff8c918e45153dd8c6d08ae0c6c45929a09d295cd789"} Sep 30 20:56:55 crc kubenswrapper[4756]: I0930 20:56:55.349133 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:56:55 crc kubenswrapper[4756]: I0930 20:56:55.368845 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-79dfdbf96-s7xnv" podStartSLOduration=2.368825968 podStartE2EDuration="2.368825968s" podCreationTimestamp="2025-09-30 20:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:56:55.368634503 +0000 UTC m=+5144.989567990" watchObservedRunningTime="2025-09-30 20:56:55.368825968 +0000 UTC m=+5144.989759455" Sep 30 20:57:02 crc kubenswrapper[4756]: I0930 20:57:02.118142 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:57:02 crc kubenswrapper[4756]: E0930 20:57:02.119501 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" 
podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:57:17 crc kubenswrapper[4756]: I0930 20:57:17.116624 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:57:17 crc kubenswrapper[4756]: E0930 20:57:17.117757 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:57:20 crc kubenswrapper[4756]: I0930 20:57:20.578834 4756 scope.go:117] "RemoveContainer" containerID="583cef01c27733e67bc68f358793355e08e5190a98d85295e4a46acad2aa47a3" Sep 30 20:57:20 crc kubenswrapper[4756]: I0930 20:57:20.625262 4756 scope.go:117] "RemoveContainer" containerID="e8e211d1162dc2a4b47a798ac51ef2dab9e075e6b37613f0d83f200e9a52a324" Sep 30 20:57:20 crc kubenswrapper[4756]: I0930 20:57:20.662862 4756 scope.go:117] "RemoveContainer" containerID="f028b580346d3179d86ab8bc73003d7cb4cda091d0cba083e47123c0ac085277" Sep 30 20:57:25 crc kubenswrapper[4756]: I0930 20:57:25.177909 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-79dfdbf96-s7xnv" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.705623 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.708986 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.714418 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-m4clr" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.715219 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.717627 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.727285 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.818885 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.819356 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.819549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96ndk\" (UniqueName: \"kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk\") pod \"openstackclient\" (UID: 
\"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.921723 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.922824 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.922929 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96ndk\" (UniqueName: \"kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.924185 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.928099 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:29 crc kubenswrapper[4756]: I0930 20:57:29.958905 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96ndk\" (UniqueName: \"kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk\") pod \"openstackclient\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") " pod="openstack/openstackclient" Sep 30 20:57:30 crc kubenswrapper[4756]: I0930 20:57:30.052520 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 20:57:30 crc kubenswrapper[4756]: I0930 20:57:30.496172 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:57:30 crc kubenswrapper[4756]: I0930 20:57:30.684855 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"530205ed-633e-43aa-bc1b-dd05d2d62bd3","Type":"ContainerStarted","Data":"bcbdf78a7e536fa3a7f9a97a058ee635aa8136d871d9f7cf143e030844592c96"} Sep 30 20:57:31 crc kubenswrapper[4756]: I0930 20:57:31.699584 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"530205ed-633e-43aa-bc1b-dd05d2d62bd3","Type":"ContainerStarted","Data":"87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f"} Sep 30 20:57:31 crc kubenswrapper[4756]: I0930 20:57:31.730288 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.730261561 podStartE2EDuration="2.730261561s" podCreationTimestamp="2025-09-30 20:57:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:57:31.727243632 +0000 UTC m=+5181.348177149" watchObservedRunningTime="2025-09-30 20:57:31.730261561 +0000 UTC m=+5181.351195078" Sep 30 20:57:32 crc kubenswrapper[4756]: I0930 20:57:32.116476 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:57:32 crc kubenswrapper[4756]: E0930 20:57:32.116816 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:57:44 crc kubenswrapper[4756]: I0930 20:57:44.117373 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:57:44 crc kubenswrapper[4756]: E0930 20:57:44.119231 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.230688 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.233380 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.275767 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.302722 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.303038 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5j62\" (UniqueName: \"kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.303160 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.404770 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.404857 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5j62\" (UniqueName: \"kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.404928 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.405346 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.405523 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.424649 4756 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d5j62\" (UniqueName: \"kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62\") pod \"redhat-marketplace-svkt7\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:49 crc kubenswrapper[4756]: I0930 20:57:49.570218 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:50 crc kubenswrapper[4756]: I0930 20:57:50.024491 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:57:50 crc kubenswrapper[4756]: I0930 20:57:50.912133 4756 generic.go:334] "Generic (PLEG): container finished" podID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerID="989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7" exitCode=0 Sep 30 20:57:50 crc kubenswrapper[4756]: I0930 20:57:50.912194 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerDied","Data":"989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7"} Sep 30 20:57:50 crc kubenswrapper[4756]: I0930 20:57:50.912925 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerStarted","Data":"e328f1b8b7e5bcbd390fdeb8954aa6ccd7e1a9f04520755b4c4e3bee06ae3d1c"} Sep 30 20:57:52 crc kubenswrapper[4756]: I0930 20:57:52.935142 4756 generic.go:334] "Generic (PLEG): container finished" podID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerID="3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126" exitCode=0 Sep 30 20:57:52 crc kubenswrapper[4756]: I0930 20:57:52.935326 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerDied","Data":"3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126"} Sep 30 20:57:53 crc kubenswrapper[4756]: I0930 20:57:53.945817 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerStarted","Data":"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058"} Sep 30 20:57:53 crc kubenswrapper[4756]: I0930 20:57:53.968178 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-svkt7" podStartSLOduration=2.509606097 podStartE2EDuration="4.968161771s" podCreationTimestamp="2025-09-30 20:57:49 +0000 UTC" firstStartedPulling="2025-09-30 20:57:50.916392325 +0000 UTC m=+5200.537325802" lastFinishedPulling="2025-09-30 20:57:53.374947999 +0000 UTC m=+5202.995881476" observedRunningTime="2025-09-30 20:57:53.96351991 +0000 UTC m=+5203.584453407" watchObservedRunningTime="2025-09-30 20:57:53.968161771 +0000 UTC m=+5203.589095248" Sep 30 20:57:56 crc kubenswrapper[4756]: I0930 20:57:56.116097 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:57:56 crc kubenswrapper[4756]: E0930 20:57:56.116838 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:57:59 crc kubenswrapper[4756]: I0930 20:57:59.571243 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:59 crc kubenswrapper[4756]: I0930 20:57:59.571636 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:57:59 crc kubenswrapper[4756]: I0930 20:57:59.629755 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:58:00 crc kubenswrapper[4756]: I0930 20:58:00.060540 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:58:00 crc kubenswrapper[4756]: I0930 20:58:00.113057 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.033622 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-svkt7" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="registry-server" containerID="cri-o://36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058" gracePeriod=2 Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.448052 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.626159 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content\") pod \"11501ad0-70b8-4860-800b-582f0ea78e5f\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.626342 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5j62\" (UniqueName: \"kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62\") pod \"11501ad0-70b8-4860-800b-582f0ea78e5f\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.626520 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities\") pod \"11501ad0-70b8-4860-800b-582f0ea78e5f\" (UID: \"11501ad0-70b8-4860-800b-582f0ea78e5f\") " Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.630100 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities" (OuterVolumeSpecName: "utilities") pod "11501ad0-70b8-4860-800b-582f0ea78e5f" (UID: "11501ad0-70b8-4860-800b-582f0ea78e5f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.634450 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62" (OuterVolumeSpecName: "kube-api-access-d5j62") pod "11501ad0-70b8-4860-800b-582f0ea78e5f" (UID: "11501ad0-70b8-4860-800b-582f0ea78e5f"). InnerVolumeSpecName "kube-api-access-d5j62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.642470 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11501ad0-70b8-4860-800b-582f0ea78e5f" (UID: "11501ad0-70b8-4860-800b-582f0ea78e5f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.728321 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.728355 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5j62\" (UniqueName: \"kubernetes.io/projected/11501ad0-70b8-4860-800b-582f0ea78e5f-kube-api-access-d5j62\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:02 crc kubenswrapper[4756]: I0930 20:58:02.728369 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11501ad0-70b8-4860-800b-582f0ea78e5f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.048507 4756 generic.go:334] "Generic (PLEG): container finished" podID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerID="36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058" exitCode=0 Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.048557 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerDied","Data":"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058"} Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.048611 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-svkt7" event={"ID":"11501ad0-70b8-4860-800b-582f0ea78e5f","Type":"ContainerDied","Data":"e328f1b8b7e5bcbd390fdeb8954aa6ccd7e1a9f04520755b4c4e3bee06ae3d1c"} Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.048630 4756 scope.go:117] "RemoveContainer" containerID="36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.048687 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-svkt7" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.092198 4756 scope.go:117] "RemoveContainer" containerID="3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.131874 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.131921 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-svkt7"] Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.144087 4756 scope.go:117] "RemoveContainer" containerID="989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.169160 4756 scope.go:117] "RemoveContainer" containerID="36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058" Sep 30 20:58:03 crc kubenswrapper[4756]: E0930 20:58:03.169577 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058\": container with ID starting with 36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058 not found: ID does not exist" containerID="36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.169610 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058"} err="failed to get container status \"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058\": rpc error: code = NotFound desc = could not find container \"36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058\": container with ID starting with 36aaca8b83a5948603355eaee36549a0eda9469a45d2d5ac48c5a47acabfc058 not found: ID does not exist" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.169630 4756 scope.go:117] "RemoveContainer" containerID="3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126" Sep 30 20:58:03 crc kubenswrapper[4756]: E0930 20:58:03.169967 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126\": container with ID starting with 3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126 not found: ID does not exist" containerID="3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.170033 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126"} err="failed to get container status \"3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126\": rpc error: code = NotFound desc = could not find container \"3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126\": container with ID starting with 3369592beb37d1c030fb48772628796b78864e225e88a5b4d62392856c53b126 not found: ID does not exist" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.170060 4756 scope.go:117] "RemoveContainer" containerID="989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7" Sep 30 20:58:03 crc kubenswrapper[4756]: E0930 20:58:03.170359 4756 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7\": container with ID starting with 989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7 not found: ID does not exist" containerID="989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7" Sep 30 20:58:03 crc kubenswrapper[4756]: I0930 20:58:03.170416 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7"} err="failed to get container status \"989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7\": rpc error: code = NotFound desc = could not find container \"989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7\": container with ID starting with 989654d1eab2668d077cdf57e0f5ca87fa2f7b8a166cd16f6122784a131631b7 not found: ID does not exist" Sep 30 20:58:05 crc kubenswrapper[4756]: I0930 20:58:05.130545 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" path="/var/lib/kubelet/pods/11501ad0-70b8-4860-800b-582f0ea78e5f/volumes" Sep 30 20:58:09 crc kubenswrapper[4756]: I0930 20:58:09.116374 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:58:09 crc kubenswrapper[4756]: E0930 20:58:09.116848 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:58:22 crc kubenswrapper[4756]: I0930 20:58:22.117237 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:58:22 crc kubenswrapper[4756]: E0930 20:58:22.119823 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:58:34 crc kubenswrapper[4756]: I0930 20:58:34.117224 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:58:34 crc kubenswrapper[4756]: E0930 20:58:34.118384 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:58:47 crc kubenswrapper[4756]: I0930 20:58:47.117191 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:58:47 crc kubenswrapper[4756]: E0930 20:58:47.118607 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:58:56 crc kubenswrapper[4756]: E0930 20:58:56.854593 4756 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.12:60276->38.102.83.12:35835: write tcp 38.102.83.12:60276->38.102.83.12:35835: write: broken pipe Sep 30 20:59:01 crc kubenswrapper[4756]: I0930 20:59:01.127260 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:59:01 crc kubenswrapper[4756]: E0930 20:59:01.128940 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.116451 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:59:13 crc kubenswrapper[4756]: E0930 20:59:13.117119 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.493005 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-mvn7x"] Sep 30 20:59:13 crc kubenswrapper[4756]: E0930 20:59:13.493516 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="registry-server" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.493538 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="registry-server" Sep 30 20:59:13 crc kubenswrapper[4756]: E0930 20:59:13.493561 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="extract-content" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.493572 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="extract-content" Sep 30 20:59:13 crc kubenswrapper[4756]: E0930 20:59:13.493614 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="extract-utilities" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.493626 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="extract-utilities" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.493906 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="11501ad0-70b8-4860-800b-582f0ea78e5f" containerName="registry-server" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.497084 
4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.512089 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mvn7x"] Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.542683 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wl6v\" (UniqueName: \"kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v\") pod \"barbican-db-create-mvn7x\" (UID: \"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2\") " pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.644241 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wl6v\" (UniqueName: \"kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v\") pod \"barbican-db-create-mvn7x\" (UID: \"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2\") " pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.664720 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wl6v\" (UniqueName: \"kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v\") pod \"barbican-db-create-mvn7x\" (UID: \"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2\") " pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:13 crc kubenswrapper[4756]: I0930 20:59:13.821533 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:14 crc kubenswrapper[4756]: I0930 20:59:14.265064 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mvn7x"] Sep 30 20:59:14 crc kubenswrapper[4756]: I0930 20:59:14.727263 4756 generic.go:334] "Generic (PLEG): container finished" podID="a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" containerID="cb2a782ec168a65cb67068fb628fb42ac6ecd46b3f5508c0d07cf893caff3b3b" exitCode=0 Sep 30 20:59:14 crc kubenswrapper[4756]: I0930 20:59:14.727370 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mvn7x" event={"ID":"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2","Type":"ContainerDied","Data":"cb2a782ec168a65cb67068fb628fb42ac6ecd46b3f5508c0d07cf893caff3b3b"} Sep 30 20:59:14 crc kubenswrapper[4756]: I0930 20:59:14.727636 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mvn7x" event={"ID":"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2","Type":"ContainerStarted","Data":"7b4eb46980d9703cfde8e771eaf8250d0c2f455a1c0f3c242dec0b69bce4994f"} Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.058942 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.087258 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wl6v\" (UniqueName: \"kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v\") pod \"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2\" (UID: \"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2\") " Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.096340 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v" (OuterVolumeSpecName: "kube-api-access-2wl6v") pod "a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" (UID: "a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2"). InnerVolumeSpecName "kube-api-access-2wl6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.188881 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wl6v\" (UniqueName: \"kubernetes.io/projected/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2-kube-api-access-2wl6v\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.755135 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mvn7x" event={"ID":"a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2","Type":"ContainerDied","Data":"7b4eb46980d9703cfde8e771eaf8250d0c2f455a1c0f3c242dec0b69bce4994f"} Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.755208 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b4eb46980d9703cfde8e771eaf8250d0c2f455a1c0f3c242dec0b69bce4994f" Sep 30 20:59:16 crc kubenswrapper[4756]: I0930 20:59:16.755307 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mvn7x" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.609766 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-cb69-account-create-dhbgr"] Sep 30 20:59:23 crc kubenswrapper[4756]: E0930 20:59:23.610895 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" containerName="mariadb-database-create" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.610916 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" containerName="mariadb-database-create" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.611208 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" containerName="mariadb-database-create" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.612142 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.614746 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.621608 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cb69-account-create-dhbgr"] Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.675549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxwqn\" (UniqueName: \"kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn\") pod \"barbican-cb69-account-create-dhbgr\" (UID: \"6690dfee-cc9f-47c1-a102-6cd970d9dd9d\") " pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.777752 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxwqn\" (UniqueName: \"kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn\") pod \"barbican-cb69-account-create-dhbgr\" (UID: \"6690dfee-cc9f-47c1-a102-6cd970d9dd9d\") " pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.800765 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxwqn\" (UniqueName: \"kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn\") pod \"barbican-cb69-account-create-dhbgr\" (UID: \"6690dfee-cc9f-47c1-a102-6cd970d9dd9d\") " pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:23 crc kubenswrapper[4756]: I0930 20:59:23.947053 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:24 crc kubenswrapper[4756]: I0930 20:59:24.390710 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cb69-account-create-dhbgr"] Sep 30 20:59:24 crc kubenswrapper[4756]: W0930 20:59:24.399877 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6690dfee_cc9f_47c1_a102_6cd970d9dd9d.slice/crio-203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88 WatchSource:0}: Error finding container 203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88: Status 404 returned error can't find the container with id 203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88 Sep 30 20:59:24 crc kubenswrapper[4756]: I0930 20:59:24.843487 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cb69-account-create-dhbgr" event={"ID":"6690dfee-cc9f-47c1-a102-6cd970d9dd9d","Type":"ContainerStarted","Data":"203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88"} Sep 30 20:59:25 crc kubenswrapper[4756]: I0930 20:59:25.859207 4756 generic.go:334] "Generic (PLEG): container finished" podID="6690dfee-cc9f-47c1-a102-6cd970d9dd9d" containerID="6d97867e24f633c3054e9ac37ce8e0a69414d89e441a0f1a3cf4c9e51c33ce7e" exitCode=0 Sep 30 20:59:25 crc kubenswrapper[4756]: I0930 20:59:25.859258 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cb69-account-create-dhbgr" event={"ID":"6690dfee-cc9f-47c1-a102-6cd970d9dd9d","Type":"ContainerDied","Data":"6d97867e24f633c3054e9ac37ce8e0a69414d89e441a0f1a3cf4c9e51c33ce7e"} Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.117493 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:59:27 crc kubenswrapper[4756]: E0930 20:59:27.118626 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.250754 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.347557 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxwqn\" (UniqueName: \"kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn\") pod \"6690dfee-cc9f-47c1-a102-6cd970d9dd9d\" (UID: \"6690dfee-cc9f-47c1-a102-6cd970d9dd9d\") " Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.355680 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn" (OuterVolumeSpecName: "kube-api-access-gxwqn") pod "6690dfee-cc9f-47c1-a102-6cd970d9dd9d" (UID: "6690dfee-cc9f-47c1-a102-6cd970d9dd9d"). InnerVolumeSpecName "kube-api-access-gxwqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.449638 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxwqn\" (UniqueName: \"kubernetes.io/projected/6690dfee-cc9f-47c1-a102-6cd970d9dd9d-kube-api-access-gxwqn\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.879751 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cb69-account-create-dhbgr" event={"ID":"6690dfee-cc9f-47c1-a102-6cd970d9dd9d","Type":"ContainerDied","Data":"203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88"} Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.880469 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="203f7d726fad99e10292b3fce3fc6b1b17b473af77d2d95269dcccba9d038c88" Sep 30 20:59:27 crc kubenswrapper[4756]: I0930 20:59:27.879831 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cb69-account-create-dhbgr" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.806003 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-fz2mj"] Sep 30 20:59:28 crc kubenswrapper[4756]: E0930 20:59:28.806713 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6690dfee-cc9f-47c1-a102-6cd970d9dd9d" containerName="mariadb-account-create" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.806747 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6690dfee-cc9f-47c1-a102-6cd970d9dd9d" containerName="mariadb-account-create" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.807188 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6690dfee-cc9f-47c1-a102-6cd970d9dd9d" containerName="mariadb-account-create" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.808513 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.817905 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2cglz" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.818270 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.818539 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fz2mj"] Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.876219 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqz4\" (UniqueName: \"kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.876364 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.876386 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.978420 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.978462 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.978545 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqz4\" (UniqueName: \"kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.982949 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.984356 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:28 crc kubenswrapper[4756]: I0930 20:59:28.995043 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqz4\" (UniqueName: \"kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4\") pod \"barbican-db-sync-fz2mj\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:29 crc kubenswrapper[4756]: I0930 20:59:29.147035 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:29 crc kubenswrapper[4756]: I0930 20:59:29.685580 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fz2mj"] Sep 30 20:59:29 crc kubenswrapper[4756]: I0930 20:59:29.907652 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fz2mj" event={"ID":"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3","Type":"ContainerStarted","Data":"6790a444f34bed220ed65e18f710789435d04ce2654ed9095e9b752c2732d74c"} Sep 30 20:59:30 crc kubenswrapper[4756]: I0930 20:59:30.919900 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fz2mj" event={"ID":"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3","Type":"ContainerStarted","Data":"cfd59081ee6a99e2fda0bbc4cb7a10fd0722cc94af12462e6d648d51ee471c2b"} Sep 30 20:59:30 crc kubenswrapper[4756]: I0930 20:59:30.936932 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-fz2mj" podStartSLOduration=2.936908969 podStartE2EDuration="2.936908969s" podCreationTimestamp="2025-09-30 20:59:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:59:30.935626756 +0000 UTC m=+5300.556560253" watchObservedRunningTime="2025-09-30 20:59:30.936908969 +0000 UTC m=+5300.557842446" Sep 30 20:59:32 crc kubenswrapper[4756]: I0930 20:59:32.941049 4756 generic.go:334] "Generic (PLEG): container finished" podID="bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" containerID="cfd59081ee6a99e2fda0bbc4cb7a10fd0722cc94af12462e6d648d51ee471c2b" exitCode=0 Sep 30 20:59:32 crc kubenswrapper[4756]: I0930 20:59:32.941221 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fz2mj" event={"ID":"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3","Type":"ContainerDied","Data":"cfd59081ee6a99e2fda0bbc4cb7a10fd0722cc94af12462e6d648d51ee471c2b"} Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.300389 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.369267 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsqz4\" (UniqueName: \"kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4\") pod \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.369376 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle\") pod \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.369614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data\") pod \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\" (UID: \"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3\") " Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.374709 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" (UID: "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.375100 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4" (OuterVolumeSpecName: "kube-api-access-fsqz4") pod "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" (UID: "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3"). InnerVolumeSpecName "kube-api-access-fsqz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.399369 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" (UID: "bd45f86f-28b1-4422-b1f6-9a2f43e39ae3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.471999 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.472033 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.472046 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsqz4\" (UniqueName: \"kubernetes.io/projected/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3-kube-api-access-fsqz4\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.959788 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fz2mj" event={"ID":"bd45f86f-28b1-4422-b1f6-9a2f43e39ae3","Type":"ContainerDied","Data":"6790a444f34bed220ed65e18f710789435d04ce2654ed9095e9b752c2732d74c"} Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.959833 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6790a444f34bed220ed65e18f710789435d04ce2654ed9095e9b752c2732d74c" Sep 30 20:59:34 crc kubenswrapper[4756]: I0930 20:59:34.959895 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fz2mj" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.217635 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-749f4bbfdd-7rqmz"] Sep 30 20:59:35 crc kubenswrapper[4756]: E0930 20:59:35.218110 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" containerName="barbican-db-sync" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.218133 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" containerName="barbican-db-sync" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.218357 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" containerName="barbican-db-sync" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.219623 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.223392 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2cglz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.224374 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.224411 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.241976 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-66c7f4f69c-78cb5"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.243312 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.248626 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.274854 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-749f4bbfdd-7rqmz"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.284482 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66c7f4f69c-78cb5"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.287756 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-combined-ca-bundle\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.287985 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh927\" (UniqueName: \"kubernetes.io/projected/094fb104-439b-4882-970f-a3913a9a40de-kube-api-access-nh927\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288137 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2038f9e9-ae3d-4e56-aaa7-f3346da48330-logs\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288241 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288336 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data-custom\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288465 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288613 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094fb104-439b-4882-970f-a3913a9a40de-logs\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc 
kubenswrapper[4756]: I0930 20:59:35.288729 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data-custom\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.288900 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-combined-ca-bundle\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.289039 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5mp2\" (UniqueName: \"kubernetes.io/projected/2038f9e9-ae3d-4e56-aaa7-f3346da48330-kube-api-access-n5mp2\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.348960 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.350418 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.358201 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.390976 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094fb104-439b-4882-970f-a3913a9a40de-logs\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391048 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data-custom\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391092 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391115 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391140 4756 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-combined-ca-bundle\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391173 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391206 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5mp2\" (UniqueName: \"kubernetes.io/projected/2038f9e9-ae3d-4e56-aaa7-f3346da48330-kube-api-access-n5mp2\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391250 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqwh5\" (UniqueName: \"kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391312 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-combined-ca-bundle\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391333 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391367 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh927\" (UniqueName: \"kubernetes.io/projected/094fb104-439b-4882-970f-a3913a9a40de-kube-api-access-nh927\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391407 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2038f9e9-ae3d-4e56-aaa7-f3346da48330-logs\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391434 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " 
pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391456 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data-custom\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.391481 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.392893 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094fb104-439b-4882-970f-a3913a9a40de-logs\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.394918 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2038f9e9-ae3d-4e56-aaa7-f3346da48330-logs\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.399427 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data-custom\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.399471 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-combined-ca-bundle\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.404342 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.405188 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2038f9e9-ae3d-4e56-aaa7-f3346da48330-config-data-custom\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.410676 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-combined-ca-bundle\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: 
\"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.411119 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094fb104-439b-4882-970f-a3913a9a40de-config-data\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.414046 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5mp2\" (UniqueName: \"kubernetes.io/projected/2038f9e9-ae3d-4e56-aaa7-f3346da48330-kube-api-access-n5mp2\") pod \"barbican-keystone-listener-749f4bbfdd-7rqmz\" (UID: \"2038f9e9-ae3d-4e56-aaa7-f3346da48330\") " pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.414508 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh927\" (UniqueName: \"kubernetes.io/projected/094fb104-439b-4882-970f-a3913a9a40de-kube-api-access-nh927\") pod \"barbican-worker-66c7f4f69c-78cb5\" (UID: \"094fb104-439b-4882-970f-a3913a9a40de\") " pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.446498 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-77676b8946-qhgfw"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.451685 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.456690 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.469276 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77676b8946-qhgfw"] Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.494686 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.494763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19642da-ea64-4716-9b75-96656c381743-logs\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.494789 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.494873 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-combined-ca-bundle\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " 
pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495043 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data-custom\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495185 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495209 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495276 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495365 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqwh5\" (UniqueName: \"kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.495408 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkmnq\" (UniqueName: \"kubernetes.io/projected/e19642da-ea64-4716-9b75-96656c381743-kube-api-access-hkmnq\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.496317 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.496988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.498310 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " 
pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.498571 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.521889 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqwh5\" (UniqueName: \"kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5\") pod \"dnsmasq-dns-66f576dd9f-d2wxs\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.566004 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.596648 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19642da-ea64-4716-9b75-96656c381743-logs\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.596686 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.596703 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-combined-ca-bundle\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.596740 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data-custom\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.596809 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkmnq\" (UniqueName: \"kubernetes.io/projected/e19642da-ea64-4716-9b75-96656c381743-kube-api-access-hkmnq\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.597522 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19642da-ea64-4716-9b75-96656c381743-logs\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.601317 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.602892 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-config-data-custom\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.606085 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19642da-ea64-4716-9b75-96656c381743-combined-ca-bundle\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.616879 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66c7f4f69c-78cb5" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.622485 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkmnq\" (UniqueName: \"kubernetes.io/projected/e19642da-ea64-4716-9b75-96656c381743-kube-api-access-hkmnq\") pod \"barbican-api-77676b8946-qhgfw\" (UID: \"e19642da-ea64-4716-9b75-96656c381743\") " pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.680034 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:35 crc kubenswrapper[4756]: I0930 20:59:35.812344 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.047717 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-749f4bbfdd-7rqmz"] Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.124563 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66c7f4f69c-78cb5"] Sep 30 20:59:36 crc kubenswrapper[4756]: W0930 20:59:36.129359 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod094fb104_439b_4882_970f_a3913a9a40de.slice/crio-914a69538a5858079ed0bd2818d03f9fc36e7a7ecafba1cbf29442e2f834f9ed WatchSource:0}: Error finding container 914a69538a5858079ed0bd2818d03f9fc36e7a7ecafba1cbf29442e2f834f9ed: Status 404 returned error can't find the container with id 914a69538a5858079ed0bd2818d03f9fc36e7a7ecafba1cbf29442e2f834f9ed Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.182019 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"] Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.304884 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77676b8946-qhgfw"] Sep 30 20:59:36 crc kubenswrapper[4756]: W0930 20:59:36.310135 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode19642da_ea64_4716_9b75_96656c381743.slice/crio-f8d05956b05265ab83b099b4022f4fe9b92f589c4b6fdb9eb0de2e464434c62a WatchSource:0}: Error finding container f8d05956b05265ab83b099b4022f4fe9b92f589c4b6fdb9eb0de2e464434c62a: Status 404 returned error can't find the container with id f8d05956b05265ab83b099b4022f4fe9b92f589c4b6fdb9eb0de2e464434c62a Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.997898 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerStarted","Data":"ec0090f0e63605e52218c643f42d35bc27d09568c583a09e091f826a3d28bcd0"} Sep 30 20:59:36 crc kubenswrapper[4756]: I0930 20:59:36.997969 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerStarted","Data":"18724503c873688c0623495d9ee66dbeed2804b265f321f7143b605704a108c8"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.000898 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77676b8946-qhgfw" event={"ID":"e19642da-ea64-4716-9b75-96656c381743","Type":"ContainerStarted","Data":"204cf17628becca58dc77a132f21500228035e408448df629ed554b90c580d21"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.000946 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77676b8946-qhgfw" event={"ID":"e19642da-ea64-4716-9b75-96656c381743","Type":"ContainerStarted","Data":"f8d05956b05265ab83b099b4022f4fe9b92f589c4b6fdb9eb0de2e464434c62a"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.003199 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66c7f4f69c-78cb5" event={"ID":"094fb104-439b-4882-970f-a3913a9a40de","Type":"ContainerStarted","Data":"13e3b5b3febd0fe47ddf1821040189cb47177501c9701093580fb155e0065864"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.003270 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-worker-66c7f4f69c-78cb5" event={"ID":"094fb104-439b-4882-970f-a3913a9a40de","Type":"ContainerStarted","Data":"914a69538a5858079ed0bd2818d03f9fc36e7a7ecafba1cbf29442e2f834f9ed"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.019965 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" event={"ID":"2038f9e9-ae3d-4e56-aaa7-f3346da48330","Type":"ContainerStarted","Data":"80b93e63acf71dc285be84dac908069bbf0e5f85b3eb04fbf98ecfe742da31f7"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.020288 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" event={"ID":"2038f9e9-ae3d-4e56-aaa7-f3346da48330","Type":"ContainerStarted","Data":"13ce2bf54366845d87dcee8575048647afd5b28c610ba5acb830ca00e112cb1b"} Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.676468 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.678587 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.687070 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.737389 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.737629 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spfnf\" (UniqueName: \"kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.737665 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.838813 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.838894 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spfnf\" (UniqueName: \"kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.838933 4756 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.839441 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.839467 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:37 crc kubenswrapper[4756]: I0930 20:59:37.860195 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spfnf\" (UniqueName: \"kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf\") pod \"certified-operators-s6tbw\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.000174 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.044665 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66c7f4f69c-78cb5" event={"ID":"094fb104-439b-4882-970f-a3913a9a40de","Type":"ContainerStarted","Data":"548c94279803254d19d822b6f53bbb1facb38fb06f4b9db8102a69d4f04c3ee8"} Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.053721 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" event={"ID":"2038f9e9-ae3d-4e56-aaa7-f3346da48330","Type":"ContainerStarted","Data":"6106c60ff13c2f1b4d0d3b382ac98914a569c7e1e4790bd118a18e1fa0704012"} Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.059552 4756 generic.go:334] "Generic (PLEG): container finished" podID="e63ce598-f06c-407e-a685-254b00a977d4" containerID="ec0090f0e63605e52218c643f42d35bc27d09568c583a09e091f826a3d28bcd0" exitCode=0 Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.059612 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerDied","Data":"ec0090f0e63605e52218c643f42d35bc27d09568c583a09e091f826a3d28bcd0"} Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.062070 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-66c7f4f69c-78cb5" podStartSLOduration=3.062049927 podStartE2EDuration="3.062049927s" podCreationTimestamp="2025-09-30 20:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:59:38.062008086 +0000 UTC m=+5307.682941573" watchObservedRunningTime="2025-09-30 20:59:38.062049927 +0000 UTC m=+5307.682983404" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.073880 4756 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/barbican-api-77676b8946-qhgfw" event={"ID":"e19642da-ea64-4716-9b75-96656c381743","Type":"ContainerStarted","Data":"4368b64da02ac5079b80deb451f745d3d8fa39d075ab038d164c7028a8ee2d21"} Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.074161 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.074287 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.104136 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-749f4bbfdd-7rqmz" podStartSLOduration=3.104121204 podStartE2EDuration="3.104121204s" podCreationTimestamp="2025-09-30 20:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:59:38.102386859 +0000 UTC m=+5307.723320336" watchObservedRunningTime="2025-09-30 20:59:38.104121204 +0000 UTC m=+5307.725054681" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.117817 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:59:38 crc kubenswrapper[4756]: E0930 20:59:38.118030 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.130542 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-77676b8946-qhgfw" podStartSLOduration=3.130526053 podStartE2EDuration="3.130526053s" podCreationTimestamp="2025-09-30 20:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:59:38.126449167 +0000 UTC m=+5307.747382644" watchObservedRunningTime="2025-09-30 20:59:38.130526053 +0000 UTC m=+5307.751459530" Sep 30 20:59:38 crc kubenswrapper[4756]: I0930 20:59:38.605465 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 20:59:38 crc kubenswrapper[4756]: W0930 20:59:38.612952 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fbc2bc5_6ca8_4f83_b3d9_5b7f3cd9664a.slice/crio-315c880ce01743d31d7a12294108f8f70861147fef6d3bb00231aed0d67fc1cc WatchSource:0}: Error finding container 315c880ce01743d31d7a12294108f8f70861147fef6d3bb00231aed0d67fc1cc: Status 404 returned error can't find the container with id 315c880ce01743d31d7a12294108f8f70861147fef6d3bb00231aed0d67fc1cc Sep 30 20:59:39 crc kubenswrapper[4756]: I0930 20:59:39.084896 4756 generic.go:334] "Generic (PLEG): container finished" podID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerID="a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a" exitCode=0 Sep 30 20:59:39 crc kubenswrapper[4756]: I0930 20:59:39.084977 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" 
event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerDied","Data":"a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a"} Sep 30 20:59:39 crc kubenswrapper[4756]: I0930 20:59:39.085024 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerStarted","Data":"315c880ce01743d31d7a12294108f8f70861147fef6d3bb00231aed0d67fc1cc"} Sep 30 20:59:39 crc kubenswrapper[4756]: I0930 20:59:39.087238 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:59:39 crc kubenswrapper[4756]: I0930 20:59:39.090996 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerStarted","Data":"085fda321b92255a388fb0d3241241008a63c414612da6d616c9feb07a3697cb"} Sep 30 20:59:40 crc kubenswrapper[4756]: I0930 20:59:40.099812 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:45 crc kubenswrapper[4756]: I0930 20:59:45.682644 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 20:59:45 crc kubenswrapper[4756]: I0930 20:59:45.701011 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" podStartSLOduration=10.700988006 podStartE2EDuration="10.700988006s" podCreationTimestamp="2025-09-30 20:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:59:39.209793403 +0000 UTC m=+5308.830726880" watchObservedRunningTime="2025-09-30 20:59:45.700988006 +0000 UTC m=+5315.321921483" Sep 30 20:59:45 crc kubenswrapper[4756]: I0930 20:59:45.754050 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:59:45 crc kubenswrapper[4756]: I0930 20:59:45.754318 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="dnsmasq-dns" containerID="cri-o://eec329a6c55b6287d8ff03be11ca35a5a4e6f341df066cc8d52a09347c0f4c6f" gracePeriod=10 Sep 30 20:59:45 crc kubenswrapper[4756]: I0930 20:59:45.795534 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.20:5353: connect: connection refused" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.173617 4756 generic.go:334] "Generic (PLEG): container finished" podID="71236825-df25-4a45-803f-4fb5ea260419" containerID="eec329a6c55b6287d8ff03be11ca35a5a4e6f341df066cc8d52a09347c0f4c6f" exitCode=0 Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.173672 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" event={"ID":"71236825-df25-4a45-803f-4fb5ea260419","Type":"ContainerDied","Data":"eec329a6c55b6287d8ff03be11ca35a5a4e6f341df066cc8d52a09347c0f4c6f"} Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.177584 4756 generic.go:334] "Generic (PLEG): container finished" podID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerID="b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95" exitCode=0 
Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.177629 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerDied","Data":"b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95"} Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.254664 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.426452 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb\") pod \"71236825-df25-4a45-803f-4fb5ea260419\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.426568 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb\") pod \"71236825-df25-4a45-803f-4fb5ea260419\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.426636 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d4kl\" (UniqueName: \"kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl\") pod \"71236825-df25-4a45-803f-4fb5ea260419\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.426695 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config\") pod \"71236825-df25-4a45-803f-4fb5ea260419\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.426752 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc\") pod \"71236825-df25-4a45-803f-4fb5ea260419\" (UID: \"71236825-df25-4a45-803f-4fb5ea260419\") " Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.442741 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl" (OuterVolumeSpecName: "kube-api-access-6d4kl") pod "71236825-df25-4a45-803f-4fb5ea260419" (UID: "71236825-df25-4a45-803f-4fb5ea260419"). InnerVolumeSpecName "kube-api-access-6d4kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.478640 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config" (OuterVolumeSpecName: "config") pod "71236825-df25-4a45-803f-4fb5ea260419" (UID: "71236825-df25-4a45-803f-4fb5ea260419"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.484143 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "71236825-df25-4a45-803f-4fb5ea260419" (UID: "71236825-df25-4a45-803f-4fb5ea260419"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.485620 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71236825-df25-4a45-803f-4fb5ea260419" (UID: "71236825-df25-4a45-803f-4fb5ea260419"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.493235 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "71236825-df25-4a45-803f-4fb5ea260419" (UID: "71236825-df25-4a45-803f-4fb5ea260419"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.528966 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.529001 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d4kl\" (UniqueName: \"kubernetes.io/projected/71236825-df25-4a45-803f-4fb5ea260419-kube-api-access-6d4kl\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.529011 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.529021 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:46 crc kubenswrapper[4756]: I0930 20:59:46.529030 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71236825-df25-4a45-803f-4fb5ea260419-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.192616 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" event={"ID":"71236825-df25-4a45-803f-4fb5ea260419","Type":"ContainerDied","Data":"809cd950c6da0d943945c278e1d7137d9fef0e0ac34f556f88c2a95e919e3b2a"} Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.192957 4756 scope.go:117] "RemoveContainer" containerID="eec329a6c55b6287d8ff03be11ca35a5a4e6f341df066cc8d52a09347c0f4c6f" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.193089 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b596d9679-bmcnx" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.198310 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerStarted","Data":"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51"} Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.225498 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s6tbw" podStartSLOduration=2.52484234 podStartE2EDuration="10.225476959s" podCreationTimestamp="2025-09-30 20:59:37 +0000 UTC" firstStartedPulling="2025-09-30 20:59:39.08701996 +0000 UTC m=+5308.707953437" lastFinishedPulling="2025-09-30 20:59:46.787654579 +0000 UTC m=+5316.408588056" observedRunningTime="2025-09-30 20:59:47.22360965 +0000 UTC m=+5316.844543157" watchObservedRunningTime="2025-09-30 20:59:47.225476959 +0000 UTC m=+5316.846410436" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.257215 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.262241 4756 scope.go:117] "RemoveContainer" containerID="54cab6884d3b690aa37bf60644a4e11bcfb78b3348a3ae2d32e9913d10dc8865" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.263986 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b596d9679-bmcnx"] Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.314912 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:47 crc kubenswrapper[4756]: I0930 20:59:47.319020 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77676b8946-qhgfw" Sep 30 20:59:48 crc kubenswrapper[4756]: I0930 20:59:48.000651 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:48 crc kubenswrapper[4756]: I0930 20:59:48.000723 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:49 crc kubenswrapper[4756]: I0930 20:59:49.068752 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-s6tbw" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="registry-server" probeResult="failure" output=< Sep 30 20:59:49 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s Sep 30 20:59:49 crc kubenswrapper[4756]: > Sep 30 20:59:49 crc kubenswrapper[4756]: I0930 20:59:49.125032 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71236825-df25-4a45-803f-4fb5ea260419" path="/var/lib/kubelet/pods/71236825-df25-4a45-803f-4fb5ea260419/volumes" Sep 30 20:59:51 crc kubenswrapper[4756]: I0930 20:59:51.120125 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 20:59:52 crc kubenswrapper[4756]: I0930 20:59:52.242857 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae"} Sep 30 20:59:58 crc kubenswrapper[4756]: I0930 20:59:58.065310 4756 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:58 crc kubenswrapper[4756]: I0930 20:59:58.136584 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 20:59:58 crc kubenswrapper[4756]: I0930 20:59:58.312917 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 20:59:59 crc kubenswrapper[4756]: I0930 20:59:59.305641 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s6tbw" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="registry-server" containerID="cri-o://907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51" gracePeriod=2 Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.146107 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb"] Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.146895 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="dnsmasq-dns" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.146912 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="dnsmasq-dns" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.146936 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="init" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.146944 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="init" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.147166 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="71236825-df25-4a45-803f-4fb5ea260419" containerName="dnsmasq-dns" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.147905 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.150461 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.150569 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.155043 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb"] Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.243753 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.292483 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.292542 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctsxg\" (UniqueName: \"kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.292576 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.313432 4756 generic.go:334] "Generic (PLEG): container finished" podID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerID="907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51" exitCode=0 Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.313480 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerDied","Data":"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51"} Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.314542 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6tbw" event={"ID":"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a","Type":"ContainerDied","Data":"315c880ce01743d31d7a12294108f8f70861147fef6d3bb00231aed0d67fc1cc"} Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.313527 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6tbw" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.314618 4756 scope.go:117] "RemoveContainer" containerID="907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.347845 4756 scope.go:117] "RemoveContainer" containerID="b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.364945 4756 scope.go:117] "RemoveContainer" containerID="a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.393915 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities\") pod \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.394007 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spfnf\" (UniqueName: \"kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf\") pod \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.395053 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content\") pod \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\" (UID: \"9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a\") " Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.395520 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.395587 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctsxg\" (UniqueName: \"kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.395639 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.395936 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities" (OuterVolumeSpecName: "utilities") pod "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" (UID: "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.397103 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.400882 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf" (OuterVolumeSpecName: "kube-api-access-spfnf") pod "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" (UID: "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a"). InnerVolumeSpecName "kube-api-access-spfnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.401210 4756 scope.go:117] "RemoveContainer" containerID="907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.401696 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51\": container with ID starting with 907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51 not found: ID does not exist" containerID="907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.401765 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51"} err="failed to get container status \"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51\": rpc error: code = NotFound desc = could not find container \"907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51\": container with ID starting with 907331896a37a05e3bbf61f5fe7433d6a5fd403019ea7c383ff10c4379114c51 not found: ID does not exist" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.401790 4756 scope.go:117] "RemoveContainer" containerID="b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.402099 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95\": container with ID starting with b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95 not found: ID does not exist" containerID="b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.402129 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95"} err="failed to get container status \"b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95\": rpc error: code = NotFound desc = could not find container \"b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95\": container with ID starting with b3197b98d4aad12d1f9c4d57c73593407816570b906b2aafa09efab469438e95 not found: ID does not exist" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.402150 4756 scope.go:117] "RemoveContainer" 
containerID="a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.403809 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a\": container with ID starting with a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a not found: ID does not exist" containerID="a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.403834 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a"} err="failed to get container status \"a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a\": rpc error: code = NotFound desc = could not find container \"a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a\": container with ID starting with a2dfc578ac116ea02c5c1c529e100d40e3fbd5e76eed4c6798c2ae7bf64c7c3a not found: ID does not exist" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.410385 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.413804 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctsxg\" (UniqueName: \"kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg\") pod \"collect-profiles-29321100-fmfxb\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.446064 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" (UID: "9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.464095 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.512509 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.512546 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spfnf\" (UniqueName: \"kubernetes.io/projected/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-kube-api-access-spfnf\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.512557 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.667447 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.678132 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s6tbw"] Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.939443 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-dq47d"] Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.940113 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.940132 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.940153 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="extract-content" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.940160 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="extract-content" Sep 30 21:00:00 crc kubenswrapper[4756]: E0930 21:00:00.940180 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="extract-utilities" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.940190 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="extract-utilities" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.940376 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.941043 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:00 crc kubenswrapper[4756]: I0930 21:00:00.979504 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-dq47d"] Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.020828 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtg9q\" (UniqueName: \"kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q\") pod \"neutron-db-create-dq47d\" (UID: \"f2985e22-daaf-45c5-8650-beac6a5bbc44\") " pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.070664 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb"] Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.134362 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtg9q\" (UniqueName: \"kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q\") pod \"neutron-db-create-dq47d\" (UID: \"f2985e22-daaf-45c5-8650-beac6a5bbc44\") " pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.135647 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a" path="/var/lib/kubelet/pods/9fbc2bc5-6ca8-4f83-b3d9-5b7f3cd9664a/volumes" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.158973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtg9q\" (UniqueName: \"kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q\") pod \"neutron-db-create-dq47d\" (UID: \"f2985e22-daaf-45c5-8650-beac6a5bbc44\") " pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.260756 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.335221 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" event={"ID":"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e","Type":"ContainerStarted","Data":"42130064f5f4bea2ddd85d94a16cce0ab176cc8960ac7954aafa97a8e78404b2"} Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.335264 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" event={"ID":"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e","Type":"ContainerStarted","Data":"4215638ed5debf4041d9d28dbc037008d549a824f006cb329a0e2aeb4f4d939a"} Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.369586 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" podStartSLOduration=1.3695449069999999 podStartE2EDuration="1.369544907s" podCreationTimestamp="2025-09-30 21:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:00:01.364007483 +0000 UTC m=+5330.984940970" watchObservedRunningTime="2025-09-30 21:00:01.369544907 +0000 UTC m=+5330.990478384" Sep 30 21:00:01 crc kubenswrapper[4756]: I0930 21:00:01.724433 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-dq47d"] Sep 30 21:00:02 crc kubenswrapper[4756]: I0930 21:00:02.347334 4756 generic.go:334] "Generic (PLEG): container finished" podID="f2985e22-daaf-45c5-8650-beac6a5bbc44" containerID="fd14b88b951bc7a26b152f4798c9eacb17743f095cd8b957da0f4036e9c7a593" exitCode=0 Sep 30 21:00:02 crc kubenswrapper[4756]: I0930 21:00:02.347476 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-dq47d" event={"ID":"f2985e22-daaf-45c5-8650-beac6a5bbc44","Type":"ContainerDied","Data":"fd14b88b951bc7a26b152f4798c9eacb17743f095cd8b957da0f4036e9c7a593"} Sep 30 21:00:02 crc kubenswrapper[4756]: I0930 21:00:02.347777 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-dq47d" event={"ID":"f2985e22-daaf-45c5-8650-beac6a5bbc44","Type":"ContainerStarted","Data":"25f92aeea886c9268c15ab98b0e92e8949c342ecb2e6ee8a1439bf6402bbcd96"} Sep 30 21:00:02 crc kubenswrapper[4756]: I0930 21:00:02.349423 4756 generic.go:334] "Generic (PLEG): container finished" podID="9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" containerID="42130064f5f4bea2ddd85d94a16cce0ab176cc8960ac7954aafa97a8e78404b2" exitCode=0 Sep 30 21:00:02 crc kubenswrapper[4756]: I0930 21:00:02.349451 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" event={"ID":"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e","Type":"ContainerDied","Data":"42130064f5f4bea2ddd85d94a16cce0ab176cc8960ac7954aafa97a8e78404b2"} Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.724224 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.729842 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.893530 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume\") pod \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.893685 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctsxg\" (UniqueName: \"kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg\") pod \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.893881 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtg9q\" (UniqueName: \"kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q\") pod \"f2985e22-daaf-45c5-8650-beac6a5bbc44\" (UID: \"f2985e22-daaf-45c5-8650-beac6a5bbc44\") " Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.893934 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume\") pod \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\" (UID: \"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e\") " Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.894512 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume" (OuterVolumeSpecName: "config-volume") pod "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" (UID: "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.898659 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" (UID: "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.899039 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg" (OuterVolumeSpecName: "kube-api-access-ctsxg") pod "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" (UID: "9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e"). InnerVolumeSpecName "kube-api-access-ctsxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.900996 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q" (OuterVolumeSpecName: "kube-api-access-qtg9q") pod "f2985e22-daaf-45c5-8650-beac6a5bbc44" (UID: "f2985e22-daaf-45c5-8650-beac6a5bbc44"). InnerVolumeSpecName "kube-api-access-qtg9q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.996477 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtg9q\" (UniqueName: \"kubernetes.io/projected/f2985e22-daaf-45c5-8650-beac6a5bbc44-kube-api-access-qtg9q\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.996522 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.996534 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4756]: I0930 21:00:03.996545 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctsxg\" (UniqueName: \"kubernetes.io/projected/9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e-kube-api-access-ctsxg\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.220711 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4"] Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.231121 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-bzml4"] Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.366162 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" event={"ID":"9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e","Type":"ContainerDied","Data":"4215638ed5debf4041d9d28dbc037008d549a824f006cb329a0e2aeb4f4d939a"} Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.366209 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4215638ed5debf4041d9d28dbc037008d549a824f006cb329a0e2aeb4f4d939a" Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.366218 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-fmfxb" Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.367849 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-dq47d" event={"ID":"f2985e22-daaf-45c5-8650-beac6a5bbc44","Type":"ContainerDied","Data":"25f92aeea886c9268c15ab98b0e92e8949c342ecb2e6ee8a1439bf6402bbcd96"} Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.367890 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-dq47d" Sep 30 21:00:04 crc kubenswrapper[4756]: I0930 21:00:04.367898 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25f92aeea886c9268c15ab98b0e92e8949c342ecb2e6ee8a1439bf6402bbcd96" Sep 30 21:00:05 crc kubenswrapper[4756]: I0930 21:00:05.129385 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77313252-ffb7-4283-8c2b-b0b9d51ce480" path="/var/lib/kubelet/pods/77313252-ffb7-4283-8c2b-b0b9d51ce480/volumes" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.056032 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-4b1e-account-create-7q5gc"] Sep 30 21:00:11 crc kubenswrapper[4756]: E0930 21:00:11.057029 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" containerName="collect-profiles" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.057045 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" containerName="collect-profiles" Sep 30 21:00:11 crc kubenswrapper[4756]: E0930 21:00:11.057078 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2985e22-daaf-45c5-8650-beac6a5bbc44" containerName="mariadb-database-create" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.057086 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2985e22-daaf-45c5-8650-beac6a5bbc44" containerName="mariadb-database-create" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.057286 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aea8105-86ee-4a7a-a4d5-1cbab9d09e3e" containerName="collect-profiles" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.057311 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2985e22-daaf-45c5-8650-beac6a5bbc44" containerName="mariadb-database-create" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.058046 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.063770 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.069979 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4b1e-account-create-7q5gc"] Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.113596 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqmpf\" (UniqueName: \"kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf\") pod \"neutron-4b1e-account-create-7q5gc\" (UID: \"1ab97981-55b0-43e6-80b8-cce94d3c3b5c\") " pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.215127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqmpf\" (UniqueName: \"kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf\") pod \"neutron-4b1e-account-create-7q5gc\" (UID: \"1ab97981-55b0-43e6-80b8-cce94d3c3b5c\") " pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.238465 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqmpf\" (UniqueName: \"kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf\") pod \"neutron-4b1e-account-create-7q5gc\" (UID: \"1ab97981-55b0-43e6-80b8-cce94d3c3b5c\") " pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.389600 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.807064 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4b1e-account-create-7q5gc"] Sep 30 21:00:11 crc kubenswrapper[4756]: I0930 21:00:11.822137 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 21:00:12 crc kubenswrapper[4756]: I0930 21:00:12.462867 4756 generic.go:334] "Generic (PLEG): container finished" podID="1ab97981-55b0-43e6-80b8-cce94d3c3b5c" containerID="0c443525020f5b5a61558dbd6b4a09d54852337dcb65e03b38c4e7a0dd179a35" exitCode=0 Sep 30 21:00:12 crc kubenswrapper[4756]: I0930 21:00:12.462928 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4b1e-account-create-7q5gc" event={"ID":"1ab97981-55b0-43e6-80b8-cce94d3c3b5c","Type":"ContainerDied","Data":"0c443525020f5b5a61558dbd6b4a09d54852337dcb65e03b38c4e7a0dd179a35"} Sep 30 21:00:12 crc kubenswrapper[4756]: I0930 21:00:12.462966 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4b1e-account-create-7q5gc" event={"ID":"1ab97981-55b0-43e6-80b8-cce94d3c3b5c","Type":"ContainerStarted","Data":"326d2fc89b888e2d2d9978b59a3ea339e5939814b0fa3f1fd31a447f6d8c0bad"} Sep 30 21:00:13 crc kubenswrapper[4756]: I0930 21:00:13.834921 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:13 crc kubenswrapper[4756]: I0930 21:00:13.859370 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqmpf\" (UniqueName: \"kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf\") pod \"1ab97981-55b0-43e6-80b8-cce94d3c3b5c\" (UID: \"1ab97981-55b0-43e6-80b8-cce94d3c3b5c\") " Sep 30 21:00:13 crc kubenswrapper[4756]: I0930 21:00:13.867134 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf" (OuterVolumeSpecName: "kube-api-access-bqmpf") pod "1ab97981-55b0-43e6-80b8-cce94d3c3b5c" (UID: "1ab97981-55b0-43e6-80b8-cce94d3c3b5c"). InnerVolumeSpecName "kube-api-access-bqmpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:13 crc kubenswrapper[4756]: I0930 21:00:13.960709 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqmpf\" (UniqueName: \"kubernetes.io/projected/1ab97981-55b0-43e6-80b8-cce94d3c3b5c-kube-api-access-bqmpf\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:14 crc kubenswrapper[4756]: I0930 21:00:14.492316 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4b1e-account-create-7q5gc" event={"ID":"1ab97981-55b0-43e6-80b8-cce94d3c3b5c","Type":"ContainerDied","Data":"326d2fc89b888e2d2d9978b59a3ea339e5939814b0fa3f1fd31a447f6d8c0bad"} Sep 30 21:00:14 crc kubenswrapper[4756]: I0930 21:00:14.492390 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="326d2fc89b888e2d2d9978b59a3ea339e5939814b0fa3f1fd31a447f6d8c0bad" Sep 30 21:00:14 crc kubenswrapper[4756]: I0930 21:00:14.492505 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4b1e-account-create-7q5gc" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.207064 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-mshpd"] Sep 30 21:00:16 crc kubenswrapper[4756]: E0930 21:00:16.207864 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab97981-55b0-43e6-80b8-cce94d3c3b5c" containerName="mariadb-account-create" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.207884 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab97981-55b0-43e6-80b8-cce94d3c3b5c" containerName="mariadb-account-create" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.208206 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ab97981-55b0-43e6-80b8-cce94d3c3b5c" containerName="mariadb-account-create" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.209092 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.213918 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jrcsd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.214484 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.216874 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mshpd"] Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.217102 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.304873 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.304961 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zwf6\" (UniqueName: \"kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.304998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.405788 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.405868 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zwf6\" (UniqueName: \"kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.405905 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.409586 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.410614 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.429325 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zwf6\" (UniqueName: \"kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6\") pod \"neutron-db-sync-mshpd\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.543827 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:16 crc kubenswrapper[4756]: I0930 21:00:16.810246 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mshpd"] Sep 30 21:00:17 crc kubenswrapper[4756]: I0930 21:00:17.523142 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mshpd" event={"ID":"d09fbc18-1295-4ffb-8e64-8dedd940711f","Type":"ContainerStarted","Data":"8f2c20459fb858d6582871d59bfc78f100424cb03945e62d93fd01506f14963e"} Sep 30 21:00:17 crc kubenswrapper[4756]: I0930 21:00:17.524669 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mshpd" event={"ID":"d09fbc18-1295-4ffb-8e64-8dedd940711f","Type":"ContainerStarted","Data":"846cc94db45c2eea363c974e604afb16f170cb2e746b10317da234e32be6b43a"} Sep 30 21:00:20 crc kubenswrapper[4756]: I0930 21:00:20.834151 4756 scope.go:117] "RemoveContainer" containerID="7f6a94a30a43da655de65a4c148f696a31bffc49e5af2e8aaf47c176a878d8f6" Sep 30 21:00:21 crc kubenswrapper[4756]: I0930 21:00:21.562382 4756 generic.go:334] "Generic (PLEG): container finished" podID="d09fbc18-1295-4ffb-8e64-8dedd940711f" containerID="8f2c20459fb858d6582871d59bfc78f100424cb03945e62d93fd01506f14963e" exitCode=0 Sep 30 21:00:21 crc kubenswrapper[4756]: I0930 21:00:21.562516 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mshpd" event={"ID":"d09fbc18-1295-4ffb-8e64-8dedd940711f","Type":"ContainerDied","Data":"8f2c20459fb858d6582871d59bfc78f100424cb03945e62d93fd01506f14963e"} Sep 30 21:00:22 crc kubenswrapper[4756]: I0930 21:00:22.941564 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.130156 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zwf6\" (UniqueName: \"kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6\") pod \"d09fbc18-1295-4ffb-8e64-8dedd940711f\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.130441 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle\") pod \"d09fbc18-1295-4ffb-8e64-8dedd940711f\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.130516 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config\") pod \"d09fbc18-1295-4ffb-8e64-8dedd940711f\" (UID: \"d09fbc18-1295-4ffb-8e64-8dedd940711f\") " Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.141332 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6" (OuterVolumeSpecName: "kube-api-access-9zwf6") pod "d09fbc18-1295-4ffb-8e64-8dedd940711f" (UID: "d09fbc18-1295-4ffb-8e64-8dedd940711f"). InnerVolumeSpecName "kube-api-access-9zwf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.174670 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config" (OuterVolumeSpecName: "config") pod "d09fbc18-1295-4ffb-8e64-8dedd940711f" (UID: "d09fbc18-1295-4ffb-8e64-8dedd940711f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.183265 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d09fbc18-1295-4ffb-8e64-8dedd940711f" (UID: "d09fbc18-1295-4ffb-8e64-8dedd940711f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.236094 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.236304 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d09fbc18-1295-4ffb-8e64-8dedd940711f-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.236370 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zwf6\" (UniqueName: \"kubernetes.io/projected/d09fbc18-1295-4ffb-8e64-8dedd940711f-kube-api-access-9zwf6\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.583087 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mshpd" event={"ID":"d09fbc18-1295-4ffb-8e64-8dedd940711f","Type":"ContainerDied","Data":"846cc94db45c2eea363c974e604afb16f170cb2e746b10317da234e32be6b43a"} Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.583144 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="846cc94db45c2eea363c974e604afb16f170cb2e746b10317da234e32be6b43a" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.583241 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mshpd" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.753173 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"] Sep 30 21:00:23 crc kubenswrapper[4756]: E0930 21:00:23.753511 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09fbc18-1295-4ffb-8e64-8dedd940711f" containerName="neutron-db-sync" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.753528 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09fbc18-1295-4ffb-8e64-8dedd940711f" containerName="neutron-db-sync" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.753688 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09fbc18-1295-4ffb-8e64-8dedd940711f" containerName="neutron-db-sync" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.754585 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.768024 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"] Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.897219 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-796b75875-n2t79"] Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.901318 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.903428 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jrcsd"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.903859 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.905582 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.910898 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-796b75875-n2t79"]
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.950924 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.954279 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrrbx\" (UniqueName: \"kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.954493 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.954624 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:23 crc kubenswrapper[4756]: I0930 21:00:23.954862 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.056272 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.056344 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-httpd-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057509 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057576 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ng25\" (UniqueName: \"kubernetes.io/projected/ebb9042d-1363-40d6-85e0-c6325b2eac1d-kube-api-access-9ng25\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057641 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrrbx\" (UniqueName: \"kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057673 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057706 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-combined-ca-bundle\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057737 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.057897 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.059018 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.059344 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.059936 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.060592 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.076980 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrrbx\" (UniqueName: \"kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx\") pod \"dnsmasq-dns-76f679cbb7-txrg2\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") " pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.158626 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.158683 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-httpd-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.158741 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ng25\" (UniqueName: \"kubernetes.io/projected/ebb9042d-1363-40d6-85e0-c6325b2eac1d-kube-api-access-9ng25\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.158792 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-combined-ca-bundle\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.164831 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.166043 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-combined-ca-bundle\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.171544 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ebb9042d-1363-40d6-85e0-c6325b2eac1d-httpd-config\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.188025 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ng25\" (UniqueName: \"kubernetes.io/projected/ebb9042d-1363-40d6-85e0-c6325b2eac1d-kube-api-access-9ng25\") pod \"neutron-796b75875-n2t79\" (UID: \"ebb9042d-1363-40d6-85e0-c6325b2eac1d\") " pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.224870 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.375842 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.795831 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-796b75875-n2t79"]
Sep 30 21:00:24 crc kubenswrapper[4756]: I0930 21:00:24.852377 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"]
Sep 30 21:00:24 crc kubenswrapper[4756]: W0930 21:00:24.875301 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87fc56ab_659e_43b0_a7e9_ccf8fc454b86.slice/crio-a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7 WatchSource:0}: Error finding container a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7: Status 404 returned error can't find the container with id a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.599544 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796b75875-n2t79" event={"ID":"ebb9042d-1363-40d6-85e0-c6325b2eac1d","Type":"ContainerStarted","Data":"ffae619288cd4cffd6e3d6723fe290f09078b1431c45d901fd3bd4c0ed1f5435"}
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.599583 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796b75875-n2t79" event={"ID":"ebb9042d-1363-40d6-85e0-c6325b2eac1d","Type":"ContainerStarted","Data":"ca1d39562143c1ba585f6d92952a0353f3a390752ed5d59aa244e6ff9b1db974"}
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.599596 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796b75875-n2t79" event={"ID":"ebb9042d-1363-40d6-85e0-c6325b2eac1d","Type":"ContainerStarted","Data":"9bec0a23553d5a9d0f50072649d37913e553e3393960c4cda7e56e70d10be7db"}
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.599683 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-796b75875-n2t79"
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.601074 4756 generic.go:334] "Generic (PLEG): container finished" podID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerID="15f77c4e7cbc2e0118367340f65248032c429c408863d10cb8cf0f9d0f0c1db4" exitCode=0
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.601117 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerDied","Data":"15f77c4e7cbc2e0118367340f65248032c429c408863d10cb8cf0f9d0f0c1db4"}
Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.601172 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerStarted","Data":"a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7"}
pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerStarted","Data":"a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7"} Sep 30 21:00:25 crc kubenswrapper[4756]: I0930 21:00:25.626716 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-796b75875-n2t79" podStartSLOduration=2.626699816 podStartE2EDuration="2.626699816s" podCreationTimestamp="2025-09-30 21:00:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:00:25.620699379 +0000 UTC m=+5355.241632846" watchObservedRunningTime="2025-09-30 21:00:25.626699816 +0000 UTC m=+5355.247633293" Sep 30 21:00:26 crc kubenswrapper[4756]: I0930 21:00:26.627206 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerStarted","Data":"cfa9e7ac9e7a80ca52ccfc7754d68810c7c8fa972c976eb6d89c19410fd01cac"} Sep 30 21:00:26 crc kubenswrapper[4756]: I0930 21:00:26.628667 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" Sep 30 21:00:26 crc kubenswrapper[4756]: I0930 21:00:26.651087 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" podStartSLOduration=3.6510680840000003 podStartE2EDuration="3.651068084s" podCreationTimestamp="2025-09-30 21:00:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:00:26.648758003 +0000 UTC m=+5356.269691490" watchObservedRunningTime="2025-09-30 21:00:26.651068084 +0000 UTC m=+5356.272001581" Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.378170 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.454909 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"] Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.455138 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="dnsmasq-dns" containerID="cri-o://085fda321b92255a388fb0d3241241008a63c414612da6d616c9feb07a3697cb" gracePeriod=10 Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.715167 4756 generic.go:334] "Generic (PLEG): container finished" podID="e63ce598-f06c-407e-a685-254b00a977d4" containerID="085fda321b92255a388fb0d3241241008a63c414612da6d616c9feb07a3697cb" exitCode=0 Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.715470 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerDied","Data":"085fda321b92255a388fb0d3241241008a63c414612da6d616c9feb07a3697cb"} Sep 30 21:00:34 crc kubenswrapper[4756]: I0930 21:00:34.982128 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.161462 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb\") pod \"e63ce598-f06c-407e-a685-254b00a977d4\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.161714 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqwh5\" (UniqueName: \"kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5\") pod \"e63ce598-f06c-407e-a685-254b00a977d4\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.161833 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb\") pod \"e63ce598-f06c-407e-a685-254b00a977d4\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.161976 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc\") pod \"e63ce598-f06c-407e-a685-254b00a977d4\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.162054 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config\") pod \"e63ce598-f06c-407e-a685-254b00a977d4\" (UID: \"e63ce598-f06c-407e-a685-254b00a977d4\") " Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.170691 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5" (OuterVolumeSpecName: "kube-api-access-tqwh5") pod "e63ce598-f06c-407e-a685-254b00a977d4" (UID: "e63ce598-f06c-407e-a685-254b00a977d4"). InnerVolumeSpecName "kube-api-access-tqwh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.204978 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e63ce598-f06c-407e-a685-254b00a977d4" (UID: "e63ce598-f06c-407e-a685-254b00a977d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.232921 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e63ce598-f06c-407e-a685-254b00a977d4" (UID: "e63ce598-f06c-407e-a685-254b00a977d4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.236198 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config" (OuterVolumeSpecName: "config") pod "e63ce598-f06c-407e-a685-254b00a977d4" (UID: "e63ce598-f06c-407e-a685-254b00a977d4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.242298 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e63ce598-f06c-407e-a685-254b00a977d4" (UID: "e63ce598-f06c-407e-a685-254b00a977d4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.263942 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.263975 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqwh5\" (UniqueName: \"kubernetes.io/projected/e63ce598-f06c-407e-a685-254b00a977d4-kube-api-access-tqwh5\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.263985 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.263994 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.264004 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63ce598-f06c-407e-a685-254b00a977d4-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.725565 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f576dd9f-d2wxs" event={"ID":"e63ce598-f06c-407e-a685-254b00a977d4","Type":"ContainerDied","Data":"18724503c873688c0623495d9ee66dbeed2804b265f321f7143b605704a108c8"} Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.725637 4756 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.726386 4756 scope.go:117] "RemoveContainer" containerID="085fda321b92255a388fb0d3241241008a63c414612da6d616c9feb07a3697cb"
Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.746555 4756 scope.go:117] "RemoveContainer" containerID="ec0090f0e63605e52218c643f42d35bc27d09568c583a09e091f826a3d28bcd0"
Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.756606 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"]
Sep 30 21:00:35 crc kubenswrapper[4756]: I0930 21:00:35.764304 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66f576dd9f-d2wxs"]
Sep 30 21:00:37 crc kubenswrapper[4756]: I0930 21:00:37.133640 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e63ce598-f06c-407e-a685-254b00a977d4" path="/var/lib/kubelet/pods/e63ce598-f06c-407e-a685-254b00a977d4/volumes"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.160895 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gc2kt"]
Sep 30 21:00:45 crc kubenswrapper[4756]: E0930 21:00:45.162800 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="init"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.164433 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="init"
Sep 30 21:00:45 crc kubenswrapper[4756]: E0930 21:00:45.164486 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="dnsmasq-dns"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.164539 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="dnsmasq-dns"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.165246 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63ce598-f06c-407e-a685-254b00a977d4" containerName="dnsmasq-dns"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.191818 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.219983 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gc2kt"]
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.247971 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4ww7\" (UniqueName: \"kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.248054 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.248097 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.349513 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4ww7\" (UniqueName: \"kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.349579 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.349615 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.350095 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.350202 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.368618 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4ww7\" (UniqueName: \"kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7\") pod \"community-operators-gc2kt\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.521518 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc2kt"
Sep 30 21:00:45 crc kubenswrapper[4756]: I0930 21:00:45.998812 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gc2kt"]
Sep 30 21:00:46 crc kubenswrapper[4756]: I0930 21:00:46.850772 4756 generic.go:334] "Generic (PLEG): container finished" podID="f3bed750-a016-4761-bde7-418537f16f7b" containerID="32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a" exitCode=0
Sep 30 21:00:46 crc kubenswrapper[4756]: I0930 21:00:46.850808 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerDied","Data":"32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a"}
Sep 30 21:00:46 crc kubenswrapper[4756]: I0930 21:00:46.852100 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerStarted","Data":"d17bd543652a75dff4ddacbf1fba34db6bfe40e22f8c1d334d9f8f6797043239"}
Sep 30 21:00:47 crc kubenswrapper[4756]: I0930 21:00:47.861907 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerStarted","Data":"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3"}
Sep 30 21:00:48 crc kubenswrapper[4756]: I0930 21:00:48.874450 4756 generic.go:334] "Generic (PLEG): container finished" podID="f3bed750-a016-4761-bde7-418537f16f7b" containerID="9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3" exitCode=0
Sep 30 21:00:48 crc kubenswrapper[4756]: I0930 21:00:48.874516 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerDied","Data":"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3"}
Sep 30 21:00:49 crc kubenswrapper[4756]: I0930 21:00:49.889054 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerStarted","Data":"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b"}
Sep 30 21:00:49 crc kubenswrapper[4756]: I0930 21:00:49.942221 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gc2kt" podStartSLOduration=2.434179922 podStartE2EDuration="4.942197326s" podCreationTimestamp="2025-09-30 21:00:45 +0000 UTC" firstStartedPulling="2025-09-30 21:00:46.853157578 +0000 UTC m=+5376.474091085" lastFinishedPulling="2025-09-30 21:00:49.361174992 +0000 UTC m=+5378.982108489" observedRunningTime="2025-09-30 21:00:49.90629933 +0000 UTC m=+5379.527232807" watchObservedRunningTime="2025-09-30 21:00:49.942197326 +0000 UTC m=+5379.563130803"
Sep 30 21:00:54 crc kubenswrapper[4756]: I0930 21:00:54.235569 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-796b75875-n2t79"
status="ready" pod="openstack/neutron-796b75875-n2t79" Sep 30 21:00:55 crc kubenswrapper[4756]: I0930 21:00:55.521633 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:55 crc kubenswrapper[4756]: I0930 21:00:55.521965 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:55 crc kubenswrapper[4756]: I0930 21:00:55.604331 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:56 crc kubenswrapper[4756]: I0930 21:00:56.018682 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:56 crc kubenswrapper[4756]: I0930 21:00:56.078498 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gc2kt"] Sep 30 21:00:57 crc kubenswrapper[4756]: I0930 21:00:57.960532 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gc2kt" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="registry-server" containerID="cri-o://158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b" gracePeriod=2 Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.426957 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.493050 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4ww7\" (UniqueName: \"kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7\") pod \"f3bed750-a016-4761-bde7-418537f16f7b\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.493120 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content\") pod \"f3bed750-a016-4761-bde7-418537f16f7b\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.493140 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities\") pod \"f3bed750-a016-4761-bde7-418537f16f7b\" (UID: \"f3bed750-a016-4761-bde7-418537f16f7b\") " Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.494943 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities" (OuterVolumeSpecName: "utilities") pod "f3bed750-a016-4761-bde7-418537f16f7b" (UID: "f3bed750-a016-4761-bde7-418537f16f7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.502353 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7" (OuterVolumeSpecName: "kube-api-access-k4ww7") pod "f3bed750-a016-4761-bde7-418537f16f7b" (UID: "f3bed750-a016-4761-bde7-418537f16f7b"). InnerVolumeSpecName "kube-api-access-k4ww7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.595573 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4ww7\" (UniqueName: \"kubernetes.io/projected/f3bed750-a016-4761-bde7-418537f16f7b-kube-api-access-k4ww7\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.595622 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.979025 4756 generic.go:334] "Generic (PLEG): container finished" podID="f3bed750-a016-4761-bde7-418537f16f7b" containerID="158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b" exitCode=0 Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.979077 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerDied","Data":"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b"} Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.979105 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc2kt" event={"ID":"f3bed750-a016-4761-bde7-418537f16f7b","Type":"ContainerDied","Data":"d17bd543652a75dff4ddacbf1fba34db6bfe40e22f8c1d334d9f8f6797043239"} Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.979125 4756 scope.go:117] "RemoveContainer" containerID="158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b" Sep 30 21:00:58 crc kubenswrapper[4756]: I0930 21:00:58.980992 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc2kt" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.001438 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3bed750-a016-4761-bde7-418537f16f7b" (UID: "f3bed750-a016-4761-bde7-418537f16f7b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.004661 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3bed750-a016-4761-bde7-418537f16f7b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.017482 4756 scope.go:117] "RemoveContainer" containerID="9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.066662 4756 scope.go:117] "RemoveContainer" containerID="32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.116119 4756 scope.go:117] "RemoveContainer" containerID="158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b" Sep 30 21:00:59 crc kubenswrapper[4756]: E0930 21:00:59.116555 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b\": container with ID starting with 158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b not found: ID does not exist" containerID="158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.116604 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b"} err="failed to get container status \"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b\": rpc error: code = NotFound desc = could not find container \"158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b\": container with ID starting with 158cd709e78d0a5646a197c115ed85c428fc7b92fff954abae2a743f3278854b not found: ID does not exist" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.116639 4756 scope.go:117] "RemoveContainer" containerID="9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3" Sep 30 21:00:59 crc kubenswrapper[4756]: E0930 21:00:59.116933 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3\": container with ID starting with 9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3 not found: ID does not exist" containerID="9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.116962 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3"} err="failed to get container status \"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3\": rpc error: code = NotFound desc = could not find container \"9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3\": container with ID starting with 9457c6975994649639ce482f79af73ded5580603d006d14e33aa0a1ecd48b6b3 not found: ID does not exist" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.116985 4756 scope.go:117] "RemoveContainer" containerID="32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a" Sep 30 21:00:59 crc kubenswrapper[4756]: E0930 21:00:59.117202 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a\": container with ID starting with 32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a not found: ID does not exist" containerID="32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.117228 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a"} err="failed to get container status \"32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a\": rpc error: code = NotFound desc = could not find container \"32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a\": container with ID starting with 32e0a21dcdf4030dab4e7b1d5fd36f390f2c1888933ee2b268b7ec215bb2d59a not found: ID does not exist" Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.316629 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gc2kt"] Sep 30 21:00:59 crc kubenswrapper[4756]: I0930 21:00:59.326962 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gc2kt"] Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.158957 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29321101-8h52b"] Sep 30 21:01:00 crc kubenswrapper[4756]: E0930 21:01:00.159331 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="extract-content" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.159346 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="extract-content" Sep 30 21:01:00 crc kubenswrapper[4756]: E0930 21:01:00.159366 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="extract-utilities" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.159376 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="extract-utilities" Sep 30 21:01:00 crc kubenswrapper[4756]: E0930 21:01:00.159461 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="registry-server" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.159473 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="registry-server" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.159752 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3bed750-a016-4761-bde7-418537f16f7b" containerName="registry-server" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.160506 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.178166 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-8h52b"] Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.224437 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.224780 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.224809 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nmtk\" (UniqueName: \"kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.224847 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.325759 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.325800 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nmtk\" (UniqueName: \"kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.325829 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.325868 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.331386 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.331620 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.331825 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.343346 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nmtk\" (UniqueName: \"kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk\") pod \"keystone-cron-29321101-8h52b\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") " pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:00 crc kubenswrapper[4756]: I0930 21:01:00.532711 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.018565 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-8h52b"] Sep 30 21:01:01 crc kubenswrapper[4756]: W0930 21:01:01.046191 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbccb727e_65d8_493b_a3cb_f5f8e535dd57.slice/crio-c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4 WatchSource:0}: Error finding container c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4: Status 404 returned error can't find the container with id c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4 Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.143888 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3bed750-a016-4761-bde7-418537f16f7b" path="/var/lib/kubelet/pods/f3bed750-a016-4761-bde7-418537f16f7b/volumes" Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.378867 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-t48lv"] Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.380572 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.386851 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t48lv"]
Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.449502 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ht2z\" (UniqueName: \"kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z\") pod \"glance-db-create-t48lv\" (UID: \"7b4bfb72-efc3-44b1-9467-f61964ed636a\") " pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.550936 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ht2z\" (UniqueName: \"kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z\") pod \"glance-db-create-t48lv\" (UID: \"7b4bfb72-efc3-44b1-9467-f61964ed636a\") " pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.569301 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ht2z\" (UniqueName: \"kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z\") pod \"glance-db-create-t48lv\" (UID: \"7b4bfb72-efc3-44b1-9467-f61964ed636a\") " pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:01 crc kubenswrapper[4756]: I0930 21:01:01.796216 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:02 crc kubenswrapper[4756]: I0930 21:01:02.019035 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-8h52b" event={"ID":"bccb727e-65d8-493b-a3cb-f5f8e535dd57","Type":"ContainerStarted","Data":"d203133595d3d2f2226202f8ae627d6e4b6cb71b49184d8b5935c06da9061c60"}
Sep 30 21:01:02 crc kubenswrapper[4756]: I0930 21:01:02.019380 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-8h52b" event={"ID":"bccb727e-65d8-493b-a3cb-f5f8e535dd57","Type":"ContainerStarted","Data":"c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4"}
Sep 30 21:01:02 crc kubenswrapper[4756]: I0930 21:01:02.042154 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29321101-8h52b" podStartSLOduration=2.042129809 podStartE2EDuration="2.042129809s" podCreationTimestamp="2025-09-30 21:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:02.040216219 +0000 UTC m=+5391.661149716" watchObservedRunningTime="2025-09-30 21:01:02.042129809 +0000 UTC m=+5391.663063306"
Sep 30 21:01:02 crc kubenswrapper[4756]: I0930 21:01:02.299493 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t48lv"]
Sep 30 21:01:02 crc kubenswrapper[4756]: W0930 21:01:02.307006 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b4bfb72_efc3_44b1_9467_f61964ed636a.slice/crio-39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6 WatchSource:0}: Error finding container 39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6: Status 404 returned error can't find the container with id 39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6
Sep 30 21:01:03 crc kubenswrapper[4756]: I0930 21:01:03.030951 4756 generic.go:334] "Generic (PLEG): container finished" podID="7b4bfb72-efc3-44b1-9467-f61964ed636a" containerID="4dd265c409a1699d3e5c079e15bfecc5bf99abec5907bc1455452b68cd9b7679" exitCode=0
Sep 30 21:01:03 crc kubenswrapper[4756]: I0930 21:01:03.031230 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t48lv" event={"ID":"7b4bfb72-efc3-44b1-9467-f61964ed636a","Type":"ContainerDied","Data":"4dd265c409a1699d3e5c079e15bfecc5bf99abec5907bc1455452b68cd9b7679"}
Sep 30 21:01:03 crc kubenswrapper[4756]: I0930 21:01:03.031312 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t48lv" event={"ID":"7b4bfb72-efc3-44b1-9467-f61964ed636a","Type":"ContainerStarted","Data":"39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6"}
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.044674 4756 generic.go:334] "Generic (PLEG): container finished" podID="bccb727e-65d8-493b-a3cb-f5f8e535dd57" containerID="d203133595d3d2f2226202f8ae627d6e4b6cb71b49184d8b5935c06da9061c60" exitCode=0
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.044807 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-8h52b" event={"ID":"bccb727e-65d8-493b-a3cb-f5f8e535dd57","Type":"ContainerDied","Data":"d203133595d3d2f2226202f8ae627d6e4b6cb71b49184d8b5935c06da9061c60"}
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.376265 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.506433 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ht2z\" (UniqueName: \"kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z\") pod \"7b4bfb72-efc3-44b1-9467-f61964ed636a\" (UID: \"7b4bfb72-efc3-44b1-9467-f61964ed636a\") "
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.517017 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z" (OuterVolumeSpecName: "kube-api-access-9ht2z") pod "7b4bfb72-efc3-44b1-9467-f61964ed636a" (UID: "7b4bfb72-efc3-44b1-9467-f61964ed636a"). InnerVolumeSpecName "kube-api-access-9ht2z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:04 crc kubenswrapper[4756]: I0930 21:01:04.609096 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ht2z\" (UniqueName: \"kubernetes.io/projected/7b4bfb72-efc3-44b1-9467-f61964ed636a-kube-api-access-9ht2z\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.054115 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t48lv"
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.054112 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t48lv" event={"ID":"7b4bfb72-efc3-44b1-9467-f61964ed636a","Type":"ContainerDied","Data":"39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6"}
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.054538 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39d421588e611289e9ee40c4b375abbe64cd134e3083e521e5c303bea80c89f6"
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.365503 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-8h52b"
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.532837 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys\") pod \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") "
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.533243 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data\") pod \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") "
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.533509 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle\") pod \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") "
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.533655 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nmtk\" (UniqueName: \"kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk\") pod \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\" (UID: \"bccb727e-65d8-493b-a3cb-f5f8e535dd57\") "
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.540254 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bccb727e-65d8-493b-a3cb-f5f8e535dd57" (UID: "bccb727e-65d8-493b-a3cb-f5f8e535dd57"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.540553 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk" (OuterVolumeSpecName: "kube-api-access-2nmtk") pod "bccb727e-65d8-493b-a3cb-f5f8e535dd57" (UID: "bccb727e-65d8-493b-a3cb-f5f8e535dd57"). InnerVolumeSpecName "kube-api-access-2nmtk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.566220 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bccb727e-65d8-493b-a3cb-f5f8e535dd57" (UID: "bccb727e-65d8-493b-a3cb-f5f8e535dd57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.588439 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data" (OuterVolumeSpecName: "config-data") pod "bccb727e-65d8-493b-a3cb-f5f8e535dd57" (UID: "bccb727e-65d8-493b-a3cb-f5f8e535dd57"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.635719 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.635762 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nmtk\" (UniqueName: \"kubernetes.io/projected/bccb727e-65d8-493b-a3cb-f5f8e535dd57-kube-api-access-2nmtk\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.635776 4756 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:05 crc kubenswrapper[4756]: I0930 21:01:05.635788 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bccb727e-65d8-493b-a3cb-f5f8e535dd57-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:06 crc kubenswrapper[4756]: I0930 21:01:06.072185 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-8h52b" event={"ID":"bccb727e-65d8-493b-a3cb-f5f8e535dd57","Type":"ContainerDied","Data":"c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4"} Sep 30 21:01:06 crc kubenswrapper[4756]: I0930 21:01:06.073668 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2880e0e3692b3339f215f77285695e5bc71ce6a001693b82eca6f16c06eeeb4" Sep 30 21:01:06 crc kubenswrapper[4756]: I0930 21:01:06.072333 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-8h52b" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.505831 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-7b75-account-create-xgm9w"] Sep 30 21:01:11 crc kubenswrapper[4756]: E0930 21:01:11.506720 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4bfb72-efc3-44b1-9467-f61964ed636a" containerName="mariadb-database-create" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.506736 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4bfb72-efc3-44b1-9467-f61964ed636a" containerName="mariadb-database-create" Sep 30 21:01:11 crc kubenswrapper[4756]: E0930 21:01:11.506771 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bccb727e-65d8-493b-a3cb-f5f8e535dd57" containerName="keystone-cron" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.506780 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bccb727e-65d8-493b-a3cb-f5f8e535dd57" containerName="keystone-cron" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.506980 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bccb727e-65d8-493b-a3cb-f5f8e535dd57" containerName="keystone-cron" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.507001 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4bfb72-efc3-44b1-9467-f61964ed636a" containerName="mariadb-database-create" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.507673 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.510384 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.517355 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7b75-account-create-xgm9w"] Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.549138 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fnwd\" (UniqueName: \"kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd\") pod \"glance-7b75-account-create-xgm9w\" (UID: \"46c44632-7e0e-4302-8261-83270bb41439\") " pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.651452 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fnwd\" (UniqueName: \"kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd\") pod \"glance-7b75-account-create-xgm9w\" (UID: \"46c44632-7e0e-4302-8261-83270bb41439\") " pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.674217 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fnwd\" (UniqueName: \"kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd\") pod \"glance-7b75-account-create-xgm9w\" (UID: \"46c44632-7e0e-4302-8261-83270bb41439\") " pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:11 crc kubenswrapper[4756]: I0930 21:01:11.832497 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:12 crc kubenswrapper[4756]: I0930 21:01:12.279077 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7b75-account-create-xgm9w"] Sep 30 21:01:12 crc kubenswrapper[4756]: W0930 21:01:12.283441 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46c44632_7e0e_4302_8261_83270bb41439.slice/crio-89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43 WatchSource:0}: Error finding container 89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43: Status 404 returned error can't find the container with id 89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43 Sep 30 21:01:13 crc kubenswrapper[4756]: I0930 21:01:13.144995 4756 generic.go:334] "Generic (PLEG): container finished" podID="46c44632-7e0e-4302-8261-83270bb41439" containerID="ab6dba53d51e0daddd120cde2c04642861f4699de3c2cac2b934fa7c65623f3c" exitCode=0 Sep 30 21:01:13 crc kubenswrapper[4756]: I0930 21:01:13.145060 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7b75-account-create-xgm9w" event={"ID":"46c44632-7e0e-4302-8261-83270bb41439","Type":"ContainerDied","Data":"ab6dba53d51e0daddd120cde2c04642861f4699de3c2cac2b934fa7c65623f3c"} Sep 30 21:01:13 crc kubenswrapper[4756]: I0930 21:01:13.145585 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7b75-account-create-xgm9w" event={"ID":"46c44632-7e0e-4302-8261-83270bb41439","Type":"ContainerStarted","Data":"89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43"} Sep 30 21:01:14 crc kubenswrapper[4756]: I0930 21:01:14.484971 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:14 crc kubenswrapper[4756]: I0930 21:01:14.507243 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fnwd\" (UniqueName: \"kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd\") pod \"46c44632-7e0e-4302-8261-83270bb41439\" (UID: \"46c44632-7e0e-4302-8261-83270bb41439\") " Sep 30 21:01:14 crc kubenswrapper[4756]: I0930 21:01:14.516301 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd" (OuterVolumeSpecName: "kube-api-access-8fnwd") pod "46c44632-7e0e-4302-8261-83270bb41439" (UID: "46c44632-7e0e-4302-8261-83270bb41439"). InnerVolumeSpecName "kube-api-access-8fnwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:14 crc kubenswrapper[4756]: I0930 21:01:14.608379 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fnwd\" (UniqueName: \"kubernetes.io/projected/46c44632-7e0e-4302-8261-83270bb41439-kube-api-access-8fnwd\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:15 crc kubenswrapper[4756]: I0930 21:01:15.166553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7b75-account-create-xgm9w" event={"ID":"46c44632-7e0e-4302-8261-83270bb41439","Type":"ContainerDied","Data":"89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43"} Sep 30 21:01:15 crc kubenswrapper[4756]: I0930 21:01:15.166638 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89b8e6b8965a345520f7485b6e804720d4790de134ce70f0511a71c97a773e43" Sep 30 21:01:15 crc kubenswrapper[4756]: I0930 21:01:15.166583 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7b75-account-create-xgm9w" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.649991 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-2rw4m"] Sep 30 21:01:16 crc kubenswrapper[4756]: E0930 21:01:16.650649 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c44632-7e0e-4302-8261-83270bb41439" containerName="mariadb-account-create" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.650669 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c44632-7e0e-4302-8261-83270bb41439" containerName="mariadb-account-create" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.650856 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c44632-7e0e-4302-8261-83270bb41439" containerName="mariadb-account-create" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.651457 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.654500 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2hdft" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.654956 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.660872 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2rw4m"] Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.847799 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.847867 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p4nd\" (UniqueName: \"kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.847933 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.848030 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.949994 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.950190 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.950275 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.950332 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p4nd\" (UniqueName: \"kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd\") pod 
\"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.954838 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.955331 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.957154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.975687 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p4nd\" (UniqueName: \"kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd\") pod \"glance-db-sync-2rw4m\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:16 crc kubenswrapper[4756]: I0930 21:01:16.983483 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:17 crc kubenswrapper[4756]: I0930 21:01:17.542345 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2rw4m"] Sep 30 21:01:18 crc kubenswrapper[4756]: I0930 21:01:18.204610 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2rw4m" event={"ID":"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c","Type":"ContainerStarted","Data":"a62eb74ea6ea7fb5bf1917c31450c012f9a6da3fab7321edb9ee09e478e98a13"} Sep 30 21:01:18 crc kubenswrapper[4756]: I0930 21:01:18.205808 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2rw4m" event={"ID":"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c","Type":"ContainerStarted","Data":"760d1beae17a0434b7963b564f2cc62f8bd50cb45044d69715e67eeec874ec82"} Sep 30 21:01:18 crc kubenswrapper[4756]: I0930 21:01:18.226217 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-2rw4m" podStartSLOduration=2.226196324 podStartE2EDuration="2.226196324s" podCreationTimestamp="2025-09-30 21:01:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:18.218119104 +0000 UTC m=+5407.839052571" watchObservedRunningTime="2025-09-30 21:01:18.226196324 +0000 UTC m=+5407.847129801" Sep 30 21:01:20 crc kubenswrapper[4756]: I0930 21:01:20.946221 4756 scope.go:117] "RemoveContainer" containerID="0f9a3dcf9b4cf9ab232985a3aa39a5f486136192d8b1349cd04275faa7d9c32b" Sep 30 21:01:21 crc kubenswrapper[4756]: I0930 21:01:21.238051 4756 generic.go:334] "Generic (PLEG): container finished" podID="14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" containerID="a62eb74ea6ea7fb5bf1917c31450c012f9a6da3fab7321edb9ee09e478e98a13" exitCode=0 Sep 30 21:01:21 crc 
kubenswrapper[4756]: I0930 21:01:21.238182 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2rw4m" event={"ID":"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c","Type":"ContainerDied","Data":"a62eb74ea6ea7fb5bf1917c31450c012f9a6da3fab7321edb9ee09e478e98a13"} Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.767761 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.857702 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle\") pod \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.858181 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data\") pod \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.858574 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p4nd\" (UniqueName: \"kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd\") pod \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.858947 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data\") pod \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\" (UID: \"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c\") " Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.863606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd" (OuterVolumeSpecName: "kube-api-access-4p4nd") pod "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" (UID: "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c"). InnerVolumeSpecName "kube-api-access-4p4nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.864758 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" (UID: "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.889066 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" (UID: "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.931665 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data" (OuterVolumeSpecName: "config-data") pod "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" (UID: "14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.964485 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.964573 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.964608 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:22 crc kubenswrapper[4756]: I0930 21:01:22.964630 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p4nd\" (UniqueName: \"kubernetes.io/projected/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c-kube-api-access-4p4nd\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.288863 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2rw4m" event={"ID":"14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c","Type":"ContainerDied","Data":"760d1beae17a0434b7963b564f2cc62f8bd50cb45044d69715e67eeec874ec82"} Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.289315 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="760d1beae17a0434b7963b564f2cc62f8bd50cb45044d69715e67eeec874ec82" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.288986 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2rw4m" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.682200 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:23 crc kubenswrapper[4756]: E0930 21:01:23.682592 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" containerName="glance-db-sync" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.682609 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" containerName="glance-db-sync" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.682754 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" containerName="glance-db-sync" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.683605 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.685567 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.685971 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.686350 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.686886 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2hdft" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.705731 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.718588 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"] Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.720501 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.746741 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"] Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779066 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c226n\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779165 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779209 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779288 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779352 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779383 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.779593 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.791804 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.793811 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.795961 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.809125 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.880954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881318 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgmjx\" (UniqueName: \"kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881349 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881415 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881447 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881477 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881499 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c226n\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881564 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881584 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881613 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6vmr\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881651 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881676 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881704 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881735 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881760 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881795 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881821 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.881857 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.882697 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.883370 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.886211 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.886311 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.887090 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.888628 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.905574 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c226n\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n\") pod \"glance-default-external-api-0\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983149 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983209 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983259 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983314 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgmjx\" (UniqueName: \"kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983823 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.983995 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984157 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984211 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6vmr\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984279 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984312 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984375 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984417 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.984814 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.985018 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.985491 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.985505 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.985511 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.989707 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.989707 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:23 crc kubenswrapper[4756]: I0930 21:01:23.991149 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.001343 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.001746 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.010011 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgmjx\" (UniqueName: \"kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx\") pod \"dnsmasq-dns-7655654869-kfkn4\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") " pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.011385 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6vmr\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr\") pod \"glance-default-internal-api-0\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.048537 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.124861 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.518139 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:24 crc kubenswrapper[4756]: W0930 21:01:24.527830 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod107a6195_b349_4f58_ac1b_2fd9b6be8e83.slice/crio-6096a500e36729c2799e2dd841b142ca62623835b5e78b5930007444f3e85cf5 WatchSource:0}: Error finding container 6096a500e36729c2799e2dd841b142ca62623835b5e78b5930007444f3e85cf5: Status 404 returned error can't find the container with id 6096a500e36729c2799e2dd841b142ca62623835b5e78b5930007444f3e85cf5 Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.554690 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"] Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.701712 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:24 crc kubenswrapper[4756]: W0930 21:01:24.712503 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7eae54c_a425_4bfd_8d71_98836fc5b4a2.slice/crio-46c206927ec44b6463cab801212b54c8189356d282369b0721afd9da8a880999 WatchSource:0}: Error finding container 46c206927ec44b6463cab801212b54c8189356d282369b0721afd9da8a880999: Status 404 returned error can't find the container with id 46c206927ec44b6463cab801212b54c8189356d282369b0721afd9da8a880999 Sep 30 21:01:24 crc kubenswrapper[4756]: I0930 21:01:24.929876 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.345708 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerStarted","Data":"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64"} Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.346082 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerStarted","Data":"6096a500e36729c2799e2dd841b142ca62623835b5e78b5930007444f3e85cf5"} Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.354437 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerStarted","Data":"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de"} Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.354492 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerStarted","Data":"46c206927ec44b6463cab801212b54c8189356d282369b0721afd9da8a880999"} Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.361915 4756 generic.go:334] "Generic (PLEG): container finished" podID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerID="9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d" exitCode=0 Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.362216 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7655654869-kfkn4" 
event={"ID":"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f","Type":"ContainerDied","Data":"9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d"} Sep 30 21:01:25 crc kubenswrapper[4756]: I0930 21:01:25.362362 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7655654869-kfkn4" event={"ID":"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f","Type":"ContainerStarted","Data":"869469bd4741a88699ea6a9a7ddea62b591c54dc446b2cbe62d1c1ae30f9a618"} Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.370833 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerStarted","Data":"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2"} Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.370896 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-log" containerID="cri-o://9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" gracePeriod=30 Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.370925 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-httpd" containerID="cri-o://03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" gracePeriod=30 Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.374983 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerStarted","Data":"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544"} Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.380142 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7655654869-kfkn4" event={"ID":"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f","Type":"ContainerStarted","Data":"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766"} Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.380366 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.400413 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.400376475 podStartE2EDuration="3.400376475s" podCreationTimestamp="2025-09-30 21:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:26.392864939 +0000 UTC m=+5416.013798416" watchObservedRunningTime="2025-09-30 21:01:26.400376475 +0000 UTC m=+5416.021309952" Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.416632 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7655654869-kfkn4" podStartSLOduration=3.416615349 podStartE2EDuration="3.416615349s" podCreationTimestamp="2025-09-30 21:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:26.412893422 +0000 UTC m=+5416.033826899" watchObservedRunningTime="2025-09-30 21:01:26.416615349 +0000 UTC m=+5416.037548826" Sep 30 21:01:26 crc kubenswrapper[4756]: I0930 21:01:26.429813 4756 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.429800012 podStartE2EDuration="3.429800012s" podCreationTimestamp="2025-09-30 21:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:26.427470782 +0000 UTC m=+5416.048404279" watchObservedRunningTime="2025-09-30 21:01:26.429800012 +0000 UTC m=+5416.050733489" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.100675 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.240685 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.240946 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.240969 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c226n\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.240989 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.241072 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.241147 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.241175 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data\") pod \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\" (UID: \"107a6195-b349-4f58-ac1b-2fd9b6be8e83\") " Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.242158 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs" (OuterVolumeSpecName: "logs") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.242284 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.246498 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph" (OuterVolumeSpecName: "ceph") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.246563 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts" (OuterVolumeSpecName: "scripts") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.252898 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n" (OuterVolumeSpecName: "kube-api-access-c226n") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "kube-api-access-c226n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.266113 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.304636 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data" (OuterVolumeSpecName: "config-data") pod "107a6195-b349-4f58-ac1b-2fd9b6be8e83" (UID: "107a6195-b349-4f58-ac1b-2fd9b6be8e83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343794 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343834 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/107a6195-b349-4f58-ac1b-2fd9b6be8e83-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343847 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343860 4756 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343872 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343883 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c226n\" (UniqueName: \"kubernetes.io/projected/107a6195-b349-4f58-ac1b-2fd9b6be8e83-kube-api-access-c226n\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.343897 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107a6195-b349-4f58-ac1b-2fd9b6be8e83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.391212 4756 generic.go:334] "Generic (PLEG): container finished" podID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerID="03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" exitCode=0 Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.391250 4756 generic.go:334] "Generic (PLEG): container finished" podID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerID="9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" exitCode=143 Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.392129 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.394009 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerDied","Data":"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2"} Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.394053 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerDied","Data":"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64"} Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.394067 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"107a6195-b349-4f58-ac1b-2fd9b6be8e83","Type":"ContainerDied","Data":"6096a500e36729c2799e2dd841b142ca62623835b5e78b5930007444f3e85cf5"} Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.394085 4756 scope.go:117] "RemoveContainer" containerID="03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.429577 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.429732 4756 scope.go:117] "RemoveContainer" containerID="9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.439168 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.458663 4756 scope.go:117] "RemoveContainer" containerID="03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" Sep 30 21:01:27 crc kubenswrapper[4756]: E0930 21:01:27.459308 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2\": container with ID starting with 03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2 not found: ID does not exist" containerID="03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.459358 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2"} err="failed to get container status \"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2\": rpc error: code = NotFound desc = could not find container \"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2\": container with ID starting with 03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2 not found: ID does not exist" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.459412 4756 scope.go:117] "RemoveContainer" containerID="9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" Sep 30 21:01:27 crc kubenswrapper[4756]: E0930 21:01:27.459769 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64\": container with ID starting with 9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64 not found: ID does not exist" 
containerID="9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.459817 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64"} err="failed to get container status \"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64\": rpc error: code = NotFound desc = could not find container \"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64\": container with ID starting with 9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64 not found: ID does not exist" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.459850 4756 scope.go:117] "RemoveContainer" containerID="03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.460640 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2"} err="failed to get container status \"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2\": rpc error: code = NotFound desc = could not find container \"03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2\": container with ID starting with 03e068f446b00d4d59daf55c8dedf671942248027afa2f7a8b92a077541392f2 not found: ID does not exist" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.460661 4756 scope.go:117] "RemoveContainer" containerID="9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.460843 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64"} err="failed to get container status \"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64\": rpc error: code = NotFound desc = could not find container \"9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64\": container with ID starting with 9bb12477abe06126711624a3284618198a9b87dad281ca54a74770b48bb0ac64 not found: ID does not exist" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.471422 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:27 crc kubenswrapper[4756]: E0930 21:01:27.471857 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-log" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.471878 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-log" Sep 30 21:01:27 crc kubenswrapper[4756]: E0930 21:01:27.471912 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-httpd" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.471921 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-httpd" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.472131 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-httpd" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.472155 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" containerName="glance-log" 
Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.473759 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.478891 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.481780 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550544 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550684 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550710 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzbvv\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550755 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550787 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550809 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.550853 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652745 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652788 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzbvv\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652815 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652831 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652850 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652876 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.652978 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.653298 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.653331 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.656253 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.656265 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.656947 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.658564 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.668886 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzbvv\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv\") pod \"glance-default-external-api-0\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.810693 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:01:27 crc kubenswrapper[4756]: I0930 21:01:27.879802 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:28 crc kubenswrapper[4756]: I0930 21:01:28.408117 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-log" containerID="cri-o://c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" gracePeriod=30 Sep 30 21:01:28 crc kubenswrapper[4756]: I0930 21:01:28.408719 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-httpd" containerID="cri-o://3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" gracePeriod=30 Sep 30 21:01:28 crc kubenswrapper[4756]: I0930 21:01:28.442862 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:01:28 crc kubenswrapper[4756]: W0930 21:01:28.447821 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45b49577_8ba6_43a8_86be_987744002d0c.slice/crio-cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748 WatchSource:0}: Error finding container cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748: Status 404 returned error can't find the container with id cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748 Sep 30 21:01:28 crc kubenswrapper[4756]: I0930 21:01:28.965709 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.102958 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6vmr\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103369 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103418 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103476 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103497 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103539 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.103568 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run\") pod \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\" (UID: \"b7eae54c-a425-4bfd-8d71-98836fc5b4a2\") " Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.104116 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.104254 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs" (OuterVolumeSpecName: "logs") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.106492 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr" (OuterVolumeSpecName: "kube-api-access-l6vmr") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "kube-api-access-l6vmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.108542 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts" (OuterVolumeSpecName: "scripts") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.108710 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph" (OuterVolumeSpecName: "ceph") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.133798 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.142274 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107a6195-b349-4f58-ac1b-2fd9b6be8e83" path="/var/lib/kubelet/pods/107a6195-b349-4f58-ac1b-2fd9b6be8e83/volumes" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.163844 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data" (OuterVolumeSpecName: "config-data") pod "b7eae54c-a425-4bfd-8d71-98836fc5b4a2" (UID: "b7eae54c-a425-4bfd-8d71-98836fc5b4a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205437 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205477 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205490 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6vmr\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-kube-api-access-l6vmr\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205503 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205514 4756 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205527 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.205537 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7eae54c-a425-4bfd-8d71-98836fc5b4a2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.420104 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerStarted","Data":"85b0b4211214b1b0940d27fac90ef0fc7dadfc4c7afc5981e944a56f3e595727"} Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.420165 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerStarted","Data":"cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748"} Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426671 4756 generic.go:334] "Generic (PLEG): container finished" podID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerID="3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" exitCode=0 Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426690 4756 generic.go:334] "Generic (PLEG): container finished" podID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerID="c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" exitCode=143 Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426703 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerDied","Data":"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544"} Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426741 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerDied","Data":"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de"} Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426756 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7eae54c-a425-4bfd-8d71-98836fc5b4a2","Type":"ContainerDied","Data":"46c206927ec44b6463cab801212b54c8189356d282369b0721afd9da8a880999"} Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426759 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.426773 4756 scope.go:117] "RemoveContainer" containerID="3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.458427 4756 scope.go:117] "RemoveContainer" containerID="c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.493521 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.512816 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.521702 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:29 crc kubenswrapper[4756]: E0930 21:01:29.522283 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-log" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.522410 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-log" Sep 30 21:01:29 crc kubenswrapper[4756]: E0930 21:01:29.522485 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-httpd" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.522565 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-httpd" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.522795 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-log" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.522873 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" containerName="glance-httpd" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.523852 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.528419 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.529789 4756 scope.go:117] "RemoveContainer" containerID="3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" Sep 30 21:01:29 crc kubenswrapper[4756]: E0930 21:01:29.530221 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544\": container with ID starting with 3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544 not found: ID does not exist" containerID="3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.530254 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544"} err="failed to get container status \"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544\": rpc error: code = NotFound desc = could not find container \"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544\": container with ID starting with 3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544 not found: ID does not exist" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.530280 4756 scope.go:117] "RemoveContainer" containerID="c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" Sep 30 21:01:29 crc kubenswrapper[4756]: E0930 21:01:29.530748 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de\": container with ID starting with c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de not found: ID does not exist" containerID="c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.530795 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de"} err="failed to get container status \"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de\": rpc error: code = NotFound desc = could not find container \"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de\": container with ID starting with c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de not found: ID does not exist" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.530828 4756 scope.go:117] "RemoveContainer" containerID="3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.531664 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544"} err="failed to get container status \"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544\": rpc error: code = NotFound desc = could not find container \"3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544\": container with ID starting with 3f60f926116db48497a91cf4ed07a647d1b1dae16e1ae7dcb86aed900be42544 not found: ID does not exist" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.531691 4756 
scope.go:117] "RemoveContainer" containerID="c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.532098 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de"} err="failed to get container status \"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de\": rpc error: code = NotFound desc = could not find container \"c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de\": container with ID starting with c119e86f3fd64e039f46114074332a95b784e3bcd0ccd2de01ec3e82f12598de not found: ID does not exist" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.533225 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713254 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713323 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713483 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713548 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713579 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxnbp\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713715 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.713768 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815023 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815341 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815372 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxnbp\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815441 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815478 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815553 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.815582 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.816222 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.819973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.820363 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.822414 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.823458 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.827816 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:29 crc kubenswrapper[4756]: I0930 21:01:29.850094 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxnbp\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp\") pod \"glance-default-internal-api-0\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:01:30 crc kubenswrapper[4756]: I0930 21:01:30.144678 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:30 crc kubenswrapper[4756]: I0930 21:01:30.437177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerStarted","Data":"1ed06ca741613b448b386d339a6050885134868e51bebd7fb347b43ca5e6c977"}
Sep 30 21:01:30 crc kubenswrapper[4756]: I0930 21:01:30.464247 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.464227019 podStartE2EDuration="3.464227019s" podCreationTimestamp="2025-09-30 21:01:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:30.457578036 +0000 UTC m=+5420.078511613" watchObservedRunningTime="2025-09-30 21:01:30.464227019 +0000 UTC m=+5420.085160506"
Sep 30 21:01:30 crc kubenswrapper[4756]: I0930 21:01:30.640983 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 21:01:30 crc kubenswrapper[4756]: W0930 21:01:30.648000 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ed70cdf_5345_4faf_ba3b_2ca0f06fb3d6.slice/crio-b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970 WatchSource:0}: Error finding container b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970: Status 404 returned error can't find the container with id b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970
Sep 30 21:01:31 crc kubenswrapper[4756]: I0930 21:01:31.132388 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7eae54c-a425-4bfd-8d71-98836fc5b4a2" path="/var/lib/kubelet/pods/b7eae54c-a425-4bfd-8d71-98836fc5b4a2/volumes"
Sep 30 21:01:31 crc kubenswrapper[4756]: I0930 21:01:31.451815 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerStarted","Data":"4ae49e1404ac093ad727eb55bcadf7520d4b489934cc4899c2ab03d72ed55621"}
Sep 30 21:01:31 crc kubenswrapper[4756]: I0930 21:01:31.451918 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerStarted","Data":"b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970"}
Sep 30 21:01:32 crc kubenswrapper[4756]: I0930 21:01:32.466169 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerStarted","Data":"30e5fe0a52e1838b034a83f46a24e63f375c89cd78121be89f384bc094ce4abc"}
Sep 30 21:01:32 crc kubenswrapper[4756]: I0930 21:01:32.497818 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.497794128 podStartE2EDuration="3.497794128s" podCreationTimestamp="2025-09-30 21:01:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:32.489565663 +0000 UTC m=+5422.110499140" watchObservedRunningTime="2025-09-30 21:01:32.497794128 +0000 UTC m=+5422.118727605"
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.051211 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7655654869-kfkn4"
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.114209 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"]
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.114504 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="dnsmasq-dns" containerID="cri-o://cfa9e7ac9e7a80ca52ccfc7754d68810c7c8fa972c976eb6d89c19410fd01cac" gracePeriod=10
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.492577 4756 generic.go:334] "Generic (PLEG): container finished" podID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerID="cfa9e7ac9e7a80ca52ccfc7754d68810c7c8fa972c976eb6d89c19410fd01cac" exitCode=0
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.492688 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerDied","Data":"cfa9e7ac9e7a80ca52ccfc7754d68810c7c8fa972c976eb6d89c19410fd01cac"}
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.608271 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.705564 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb\") pod \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") "
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.705671 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb\") pod \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") "
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.743577 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87fc56ab-659e-43b0-a7e9-ccf8fc454b86" (UID: "87fc56ab-659e-43b0-a7e9-ccf8fc454b86"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.757559 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87fc56ab-659e-43b0-a7e9-ccf8fc454b86" (UID: "87fc56ab-659e-43b0-a7e9-ccf8fc454b86"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.806980 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrrbx\" (UniqueName: \"kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx\") pod \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") "
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.807192 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config\") pod \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") "
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.807229 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc\") pod \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\" (UID: \"87fc56ab-659e-43b0-a7e9-ccf8fc454b86\") "
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.807580 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.807596 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.810719 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx" (OuterVolumeSpecName: "kube-api-access-vrrbx") pod "87fc56ab-659e-43b0-a7e9-ccf8fc454b86" (UID: "87fc56ab-659e-43b0-a7e9-ccf8fc454b86"). InnerVolumeSpecName "kube-api-access-vrrbx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.844019 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87fc56ab-659e-43b0-a7e9-ccf8fc454b86" (UID: "87fc56ab-659e-43b0-a7e9-ccf8fc454b86"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.863939 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config" (OuterVolumeSpecName: "config") pod "87fc56ab-659e-43b0-a7e9-ccf8fc454b86" (UID: "87fc56ab-659e-43b0-a7e9-ccf8fc454b86"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.909788 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrrbx\" (UniqueName: \"kubernetes.io/projected/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-kube-api-access-vrrbx\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.910022 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-config\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:34 crc kubenswrapper[4756]: I0930 21:01:34.910030 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87fc56ab-659e-43b0-a7e9-ccf8fc454b86-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.510831 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" event={"ID":"87fc56ab-659e-43b0-a7e9-ccf8fc454b86","Type":"ContainerDied","Data":"a4b8388a54fbc396c1ad2155ed1f3fe91ae1739c1de6c1c9f0c216ca787035a7"}
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.510885 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2"
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.510944 4756 scope.go:117] "RemoveContainer" containerID="cfa9e7ac9e7a80ca52ccfc7754d68810c7c8fa972c976eb6d89c19410fd01cac"
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.549088 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"]
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.556530 4756 scope.go:117] "RemoveContainer" containerID="15f77c4e7cbc2e0118367340f65248032c429c408863d10cb8cf0f9d0f0c1db4"
Sep 30 21:01:35 crc kubenswrapper[4756]: I0930 21:01:35.566861 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76f679cbb7-txrg2"]
Sep 30 21:01:37 crc kubenswrapper[4756]: I0930 21:01:37.132765 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" path="/var/lib/kubelet/pods/87fc56ab-659e-43b0-a7e9-ccf8fc454b86/volumes"
Sep 30 21:01:37 crc kubenswrapper[4756]: I0930 21:01:37.811521 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:37 crc kubenswrapper[4756]: I0930 21:01:37.812159 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:37 crc kubenswrapper[4756]: I0930 21:01:37.869391 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:37 crc kubenswrapper[4756]: I0930 21:01:37.874283 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:38 crc kubenswrapper[4756]: I0930 21:01:38.545789 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:38 crc kubenswrapper[4756]: I0930 21:01:38.545877 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:39 crc kubenswrapper[4756]: I0930 21:01:39.377225 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76f679cbb7-txrg2" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.37:5353: i/o timeout"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.145991 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.146068 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.183886 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.188115 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.564959 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.565217 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.590155 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.590318 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 21:01:40 crc kubenswrapper[4756]: I0930 21:01:40.594014 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 30 21:01:42 crc kubenswrapper[4756]: I0930 21:01:42.452697 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:42 crc kubenswrapper[4756]: I0930 21:01:42.551541 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.613955 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-cj4b4"]
Sep 30 21:01:50 crc kubenswrapper[4756]: E0930 21:01:50.614849 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="init"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.614865 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="init"
Sep 30 21:01:50 crc kubenswrapper[4756]: E0930 21:01:50.614897 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="dnsmasq-dns"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.614903 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="dnsmasq-dns"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.615110 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="87fc56ab-659e-43b0-a7e9-ccf8fc454b86" containerName="dnsmasq-dns"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.615732 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.622271 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-cj4b4"]
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.784174 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kkbt\" (UniqueName: \"kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt\") pod \"placement-db-create-cj4b4\" (UID: \"6451acc6-4e73-4d50-b309-adb65a297d63\") " pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.885617 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kkbt\" (UniqueName: \"kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt\") pod \"placement-db-create-cj4b4\" (UID: \"6451acc6-4e73-4d50-b309-adb65a297d63\") " pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.907265 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kkbt\" (UniqueName: \"kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt\") pod \"placement-db-create-cj4b4\" (UID: \"6451acc6-4e73-4d50-b309-adb65a297d63\") " pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:50 crc kubenswrapper[4756]: I0930 21:01:50.936496 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:51 crc kubenswrapper[4756]: I0930 21:01:51.898901 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-cj4b4"]
Sep 30 21:01:51 crc kubenswrapper[4756]: W0930 21:01:51.902044 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6451acc6_4e73_4d50_b309_adb65a297d63.slice/crio-9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e WatchSource:0}: Error finding container 9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e: Status 404 returned error can't find the container with id 9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e
Sep 30 21:01:52 crc kubenswrapper[4756]: I0930 21:01:52.711913 4756 generic.go:334] "Generic (PLEG): container finished" podID="6451acc6-4e73-4d50-b309-adb65a297d63" containerID="9a3649c9dbeee4aed8b0754aa266b9bf14510988fa809693258ef1a89aa92a61" exitCode=0
Sep 30 21:01:52 crc kubenswrapper[4756]: I0930 21:01:52.711953 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cj4b4" event={"ID":"6451acc6-4e73-4d50-b309-adb65a297d63","Type":"ContainerDied","Data":"9a3649c9dbeee4aed8b0754aa266b9bf14510988fa809693258ef1a89aa92a61"}
Sep 30 21:01:52 crc kubenswrapper[4756]: I0930 21:01:52.712567 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cj4b4" event={"ID":"6451acc6-4e73-4d50-b309-adb65a297d63","Type":"ContainerStarted","Data":"9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e"}
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.146151 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.169241 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kkbt\" (UniqueName: \"kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt\") pod \"6451acc6-4e73-4d50-b309-adb65a297d63\" (UID: \"6451acc6-4e73-4d50-b309-adb65a297d63\") "
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.177469 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt" (OuterVolumeSpecName: "kube-api-access-6kkbt") pod "6451acc6-4e73-4d50-b309-adb65a297d63" (UID: "6451acc6-4e73-4d50-b309-adb65a297d63"). InnerVolumeSpecName "kube-api-access-6kkbt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.271103 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kkbt\" (UniqueName: \"kubernetes.io/projected/6451acc6-4e73-4d50-b309-adb65a297d63-kube-api-access-6kkbt\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.742682 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cj4b4" event={"ID":"6451acc6-4e73-4d50-b309-adb65a297d63","Type":"ContainerDied","Data":"9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e"}
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.742725 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cj4b4"
Sep 30 21:01:54 crc kubenswrapper[4756]: I0930 21:01:54.742747 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e68cbb997c59ea19ea0e3ebfcace1109ba66501bcc84fe6b848fd3d1897c71e"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.752202 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5e82-account-create-rphpt"]
Sep 30 21:02:00 crc kubenswrapper[4756]: E0930 21:02:00.753362 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6451acc6-4e73-4d50-b309-adb65a297d63" containerName="mariadb-database-create"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.753385 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6451acc6-4e73-4d50-b309-adb65a297d63" containerName="mariadb-database-create"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.753789 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6451acc6-4e73-4d50-b309-adb65a297d63" containerName="mariadb-database-create"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.754992 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.758627 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.767817 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5e82-account-create-rphpt"]
Sep 30 21:02:00 crc kubenswrapper[4756]: I0930 21:02:00.903901 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz8nv\" (UniqueName: \"kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv\") pod \"placement-5e82-account-create-rphpt\" (UID: \"85a3f0b6-c541-4d6f-8d22-83b1e99e4713\") " pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.006258 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz8nv\" (UniqueName: \"kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv\") pod \"placement-5e82-account-create-rphpt\" (UID: \"85a3f0b6-c541-4d6f-8d22-83b1e99e4713\") " pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.057335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz8nv\" (UniqueName: \"kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv\") pod \"placement-5e82-account-create-rphpt\" (UID: \"85a3f0b6-c541-4d6f-8d22-83b1e99e4713\") " pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.078812 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.583985 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5e82-account-create-rphpt"]
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.826731 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5e82-account-create-rphpt" event={"ID":"85a3f0b6-c541-4d6f-8d22-83b1e99e4713","Type":"ContainerStarted","Data":"dac1f145b7259b22522b30be300f16e0c63bba40c22562c75e9311a0e4aa4fb3"}
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.826783 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5e82-account-create-rphpt" event={"ID":"85a3f0b6-c541-4d6f-8d22-83b1e99e4713","Type":"ContainerStarted","Data":"ad4a7380695a93737e8ef6ecc3cd60c1f7d700ef2ee55934366b3424c2540f24"}
Sep 30 21:02:01 crc kubenswrapper[4756]: I0930 21:02:01.849800 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5e82-account-create-rphpt" podStartSLOduration=1.849781192 podStartE2EDuration="1.849781192s" podCreationTimestamp="2025-09-30 21:02:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:02:01.842580544 +0000 UTC m=+5451.463514081" watchObservedRunningTime="2025-09-30 21:02:01.849781192 +0000 UTC m=+5451.470714679"
Sep 30 21:02:02 crc kubenswrapper[4756]: I0930 21:02:02.842795 4756 generic.go:334] "Generic (PLEG): container finished" podID="85a3f0b6-c541-4d6f-8d22-83b1e99e4713" containerID="dac1f145b7259b22522b30be300f16e0c63bba40c22562c75e9311a0e4aa4fb3" exitCode=0
Sep 30 21:02:02 crc kubenswrapper[4756]: I0930 21:02:02.842881 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5e82-account-create-rphpt" event={"ID":"85a3f0b6-c541-4d6f-8d22-83b1e99e4713","Type":"ContainerDied","Data":"dac1f145b7259b22522b30be300f16e0c63bba40c22562c75e9311a0e4aa4fb3"}
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.175211 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.273580 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz8nv\" (UniqueName: \"kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv\") pod \"85a3f0b6-c541-4d6f-8d22-83b1e99e4713\" (UID: \"85a3f0b6-c541-4d6f-8d22-83b1e99e4713\") "
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.278439 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv" (OuterVolumeSpecName: "kube-api-access-kz8nv") pod "85a3f0b6-c541-4d6f-8d22-83b1e99e4713" (UID: "85a3f0b6-c541-4d6f-8d22-83b1e99e4713"). InnerVolumeSpecName "kube-api-access-kz8nv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.375515 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz8nv\" (UniqueName: \"kubernetes.io/projected/85a3f0b6-c541-4d6f-8d22-83b1e99e4713-kube-api-access-kz8nv\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.864965 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5e82-account-create-rphpt" event={"ID":"85a3f0b6-c541-4d6f-8d22-83b1e99e4713","Type":"ContainerDied","Data":"ad4a7380695a93737e8ef6ecc3cd60c1f7d700ef2ee55934366b3424c2540f24"}
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.865039 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad4a7380695a93737e8ef6ecc3cd60c1f7d700ef2ee55934366b3424c2540f24"
Sep 30 21:02:04 crc kubenswrapper[4756]: I0930 21:02:04.865045 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5e82-account-create-rphpt"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.026366 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"]
Sep 30 21:02:06 crc kubenswrapper[4756]: E0930 21:02:06.027122 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a3f0b6-c541-4d6f-8d22-83b1e99e4713" containerName="mariadb-account-create"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.027139 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a3f0b6-c541-4d6f-8d22-83b1e99e4713" containerName="mariadb-account-create"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.030632 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a3f0b6-c541-4d6f-8d22-83b1e99e4713" containerName="mariadb-account-create"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.031995 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.049656 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-8rdqr"]
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.052583 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.055496 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.058059 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.058064 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-75tqk"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.064940 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"]
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.073363 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8rdqr"]
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108571 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108649 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-488qq\" (UniqueName: \"kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108677 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108701 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108740 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108764 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108806 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108849 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt89s\" (UniqueName: \"kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108903 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.108931 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210705 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt89s\" (UniqueName: \"kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210773 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210797 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210844 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210875 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-488qq\" (UniqueName: \"kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210890 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210908 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210936 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210954 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.210981 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.211383 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.212129 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.212661 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.212950 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.212993 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.216612 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.217110 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.230065 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.231535 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-488qq\" (UniqueName: \"kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq\") pod \"dnsmasq-dns-6cc6b65957-62z7q\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.231867 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt89s\" (UniqueName: \"kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s\") pod \"placement-db-sync-8rdqr\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") " pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.355153 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.375696 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.857262 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"]
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.881246 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" event={"ID":"c60a0dcf-789e-4d15-9456-6e3e414e18b1","Type":"ContainerStarted","Data":"d6967756770d2370a23eb281034458feb1076c821757a6e4f736daaf33b3567c"}
Sep 30 21:02:06 crc kubenswrapper[4756]: I0930 21:02:06.917613 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8rdqr"]
Sep 30 21:02:06 crc kubenswrapper[4756]: W0930 21:02:06.920316 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bdf287b_f9d9_44e4_926c_958054dd0ca7.slice/crio-a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f WatchSource:0}: Error finding container a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f: Status 404 returned error can't find the container with id a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f
Sep 30 21:02:07 crc kubenswrapper[4756]: I0930 21:02:07.890592 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8rdqr" event={"ID":"9bdf287b-f9d9-44e4-926c-958054dd0ca7","Type":"ContainerStarted","Data":"a7428519240363e8f4dbbb9d294845a32cf47fb9d46aa848158c3ed7999b53db"}
Sep 30 21:02:07 crc kubenswrapper[4756]: I0930 21:02:07.890976 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8rdqr" event={"ID":"9bdf287b-f9d9-44e4-926c-958054dd0ca7","Type":"ContainerStarted","Data":"a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f"}
Sep 30 21:02:07 crc kubenswrapper[4756]: I0930 21:02:07.892900 4756 generic.go:334] "Generic (PLEG): container finished" podID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerID="63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61" exitCode=0
Sep 30 21:02:07 crc kubenswrapper[4756]: I0930 21:02:07.892931 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" event={"ID":"c60a0dcf-789e-4d15-9456-6e3e414e18b1","Type":"ContainerDied","Data":"63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61"}
Sep 30 21:02:07 crc kubenswrapper[4756]: I0930 21:02:07.926289 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-8rdqr" podStartSLOduration=1.92626691 podStartE2EDuration="1.92626691s" podCreationTimestamp="2025-09-30 21:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:02:07.91706315 +0000 UTC m=+5457.537996717" watchObservedRunningTime="2025-09-30 21:02:07.92626691 +0000 UTC m=+5457.547200387"
Sep 30 21:02:08 crc kubenswrapper[4756]: I0930 21:02:08.905900 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" event={"ID":"c60a0dcf-789e-4d15-9456-6e3e414e18b1","Type":"ContainerStarted","Data":"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"}
Sep 30 21:02:08 crc kubenswrapper[4756]: I0930 21:02:08.906341 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:08 crc kubenswrapper[4756]: I0930 21:02:08.908761 4756 generic.go:334] "Generic (PLEG): container finished" podID="9bdf287b-f9d9-44e4-926c-958054dd0ca7" containerID="a7428519240363e8f4dbbb9d294845a32cf47fb9d46aa848158c3ed7999b53db" exitCode=0
Sep 30 21:02:08 crc kubenswrapper[4756]: I0930 21:02:08.909543 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8rdqr" event={"ID":"9bdf287b-f9d9-44e4-926c-958054dd0ca7","Type":"ContainerDied","Data":"a7428519240363e8f4dbbb9d294845a32cf47fb9d46aa848158c3ed7999b53db"}
Sep 30 21:02:08 crc kubenswrapper[4756]: I0930 21:02:08.950824 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" podStartSLOduration=3.950801102 podStartE2EDuration="3.950801102s" podCreationTimestamp="2025-09-30 21:02:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:02:08.939507687 +0000 UTC m=+5458.560441214" watchObservedRunningTime="2025-09-30 21:02:08.950801102 +0000 UTC m=+5458.571734599"
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.227011 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.391999 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts\") pod \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") "
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.392050 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data\") pod \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") "
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.392102 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle\") pod \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") "
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.392193 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs\") pod \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") "
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.392220 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt89s\" (UniqueName: \"kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s\") pod \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\" (UID: \"9bdf287b-f9d9-44e4-926c-958054dd0ca7\") "
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.393901 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs" (OuterVolumeSpecName: "logs") pod "9bdf287b-f9d9-44e4-926c-958054dd0ca7" (UID: "9bdf287b-f9d9-44e4-926c-958054dd0ca7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.399558 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts" (OuterVolumeSpecName: "scripts") pod "9bdf287b-f9d9-44e4-926c-958054dd0ca7" (UID: "9bdf287b-f9d9-44e4-926c-958054dd0ca7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.399689 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s" (OuterVolumeSpecName: "kube-api-access-gt89s") pod "9bdf287b-f9d9-44e4-926c-958054dd0ca7" (UID: "9bdf287b-f9d9-44e4-926c-958054dd0ca7"). InnerVolumeSpecName "kube-api-access-gt89s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.415183 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data" (OuterVolumeSpecName: "config-data") pod "9bdf287b-f9d9-44e4-926c-958054dd0ca7" (UID: "9bdf287b-f9d9-44e4-926c-958054dd0ca7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.420983 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bdf287b-f9d9-44e4-926c-958054dd0ca7" (UID: "9bdf287b-f9d9-44e4-926c-958054dd0ca7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.494925 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.494969 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdf287b-f9d9-44e4-926c-958054dd0ca7-logs\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.494986 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt89s\" (UniqueName: \"kubernetes.io/projected/9bdf287b-f9d9-44e4-926c-958054dd0ca7-kube-api-access-gt89s\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.495001 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.495016 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdf287b-f9d9-44e4-926c-958054dd0ca7-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.936219 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8rdqr" event={"ID":"9bdf287b-f9d9-44e4-926c-958054dd0ca7","Type":"ContainerDied","Data":"a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f"}
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.936265 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a66a492e470b433f06e033687bc6958ecc6f6edb908caf311196127da80a038f"
Sep 30 21:02:10 crc kubenswrapper[4756]: I0930 21:02:10.936346 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8rdqr"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.032805 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7f6fcc4bd8-b42zh"]
Sep 30 21:02:11 crc kubenswrapper[4756]: E0930 21:02:11.033693 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bdf287b-f9d9-44e4-926c-958054dd0ca7" containerName="placement-db-sync"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.033711 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bdf287b-f9d9-44e4-926c-958054dd0ca7" containerName="placement-db-sync"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.033971 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bdf287b-f9d9-44e4-926c-958054dd0ca7" containerName="placement-db-sync"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.035164 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.038255 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.038778 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.038911 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-75tqk"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.063800 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f6fcc4bd8-b42zh"]
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.107472 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-combined-ca-bundle\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.107530 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-scripts\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.107566 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fd50551-db2a-414f-9c25-712ff7b4ec70-logs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.107581 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-config-data\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.107610 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjvjs\" (UniqueName: \"kubernetes.io/projected/0fd50551-db2a-414f-9c25-712ff7b4ec70-kube-api-access-kjvjs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209334 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fd50551-db2a-414f-9c25-712ff7b4ec70-logs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209387 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-config-data\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209450 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjvjs\" (UniqueName: \"kubernetes.io/projected/0fd50551-db2a-414f-9c25-712ff7b4ec70-kube-api-access-kjvjs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209587 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-combined-ca-bundle\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209625 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-scripts\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.209736 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fd50551-db2a-414f-9c25-712ff7b4ec70-logs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.211290 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.211407 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.215195 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-combined-ca-bundle\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.222335 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-config-data\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.225083 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fd50551-db2a-414f-9c25-712ff7b4ec70-scripts\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.225718 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjvjs\" (UniqueName: \"kubernetes.io/projected/0fd50551-db2a-414f-9c25-712ff7b4ec70-kube-api-access-kjvjs\") pod \"placement-7f6fcc4bd8-b42zh\" (UID: \"0fd50551-db2a-414f-9c25-712ff7b4ec70\") " pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.364559 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-75tqk"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.373646 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.539426 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.540192 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.859300 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f6fcc4bd8-b42zh"]
Sep 30 21:02:11 crc kubenswrapper[4756]: I0930 21:02:11.947709 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6fcc4bd8-b42zh" event={"ID":"0fd50551-db2a-414f-9c25-712ff7b4ec70","Type":"ContainerStarted","Data":"4c8085d9b7dd2c1448e1bd1c8ad7d7ac778b76faeb1723b58736f3a4262de84d"}
Sep 30 21:02:12 crc kubenswrapper[4756]: I0930 21:02:12.957030 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6fcc4bd8-b42zh" event={"ID":"0fd50551-db2a-414f-9c25-712ff7b4ec70","Type":"ContainerStarted","Data":"5c444c85c6e65b3ae3f2fdc2d9c79ad5f0c4007de805cd5b7557adebdf561935"}
Sep 30 21:02:12 crc kubenswrapper[4756]: I0930 21:02:12.957391 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6fcc4bd8-b42zh" event={"ID":"0fd50551-db2a-414f-9c25-712ff7b4ec70","Type":"ContainerStarted","Data":"8f10cd6474d9fb566cb03e1667f331733a7474aab08b0f07b02144b83f6aaa03"}
Sep 30 21:02:12 crc kubenswrapper[4756]: I0930 21:02:12.958760 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:12 crc kubenswrapper[4756]: I0930 21:02:12.958807 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f6fcc4bd8-b42zh"
Sep 30 21:02:12 crc kubenswrapper[4756]: I0930 21:02:12.986564 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7f6fcc4bd8-b42zh" podStartSLOduration=1.986538653 podStartE2EDuration="1.986538653s" podCreationTimestamp="2025-09-30 21:02:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:02:12.979023537 +0000 UTC m=+5462.599957054" watchObservedRunningTime="2025-09-30 21:02:12.986538653 +0000 UTC m=+5462.607472150"
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.357520 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.416073 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"]
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.416353 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7655654869-kfkn4" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="dnsmasq-dns" containerID="cri-o://93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766" gracePeriod=10
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.850506 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7655654869-kfkn4"
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.914742 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb\") pod \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") "
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.914816 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc\") pod \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") "
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.914893 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config\") pod \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") "
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.915009 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb\") pod \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") "
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.915086 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgmjx\" (UniqueName: \"kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx\") pod \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\" (UID: \"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f\") "
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.923853 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx" (OuterVolumeSpecName: "kube-api-access-tgmjx") pod "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" (UID: "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f"). InnerVolumeSpecName "kube-api-access-tgmjx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.971857 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" (UID: "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.978212 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" (UID: "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.983138 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" (UID: "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:02:16 crc kubenswrapper[4756]: I0930 21:02:16.990705 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config" (OuterVolumeSpecName: "config") pod "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" (UID: "e9cf1b3d-95ca-4954-9482-fda5d81d2a1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.010070 4756 generic.go:334] "Generic (PLEG): container finished" podID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerID="93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766" exitCode=0
Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.010122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7655654869-kfkn4" event={"ID":"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f","Type":"ContainerDied","Data":"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766"}
Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.010154 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7655654869-kfkn4" event={"ID":"e9cf1b3d-95ca-4954-9482-fda5d81d2a1f","Type":"ContainerDied","Data":"869469bd4741a88699ea6a9a7ddea62b591c54dc446b2cbe62d1c1ae30f9a618"}
Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.010177 4756 scope.go:117] "RemoveContainer" containerID="93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766"
Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.010314 4756 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-7655654869-kfkn4" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.016719 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.016739 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.016750 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgmjx\" (UniqueName: \"kubernetes.io/projected/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-kube-api-access-tgmjx\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.016759 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.016767 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.041928 4756 scope.go:117] "RemoveContainer" containerID="9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.047083 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"] Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.054382 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7655654869-kfkn4"] Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.062023 4756 scope.go:117] "RemoveContainer" containerID="93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766" Sep 30 21:02:17 crc kubenswrapper[4756]: E0930 21:02:17.062344 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766\": container with ID starting with 93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766 not found: ID does not exist" containerID="93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.062373 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766"} err="failed to get container status \"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766\": rpc error: code = NotFound desc = could not find container \"93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766\": container with ID starting with 93fd5ce377841a0ef6f7283b9d60fd632ae6a162d26d7dfabf48f313d3a82766 not found: ID does not exist" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.062407 4756 scope.go:117] "RemoveContainer" containerID="9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d" Sep 30 21:02:17 crc kubenswrapper[4756]: E0930 21:02:17.062627 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d\": container with ID starting with 9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d not found: ID does not exist" containerID="9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.062649 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d"} err="failed to get container status \"9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d\": rpc error: code = NotFound desc = could not find container \"9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d\": container with ID starting with 9646756d4eb532e72f10d8e9631db724b45625739e82259841ef19b312ab4b7d not found: ID does not exist" Sep 30 21:02:17 crc kubenswrapper[4756]: I0930 21:02:17.126887 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" path="/var/lib/kubelet/pods/e9cf1b3d-95ca-4954-9482-fda5d81d2a1f/volumes" Sep 30 21:02:41 crc kubenswrapper[4756]: I0930 21:02:41.539254 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:02:41 crc kubenswrapper[4756]: I0930 21:02:41.539913 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:02:42 crc kubenswrapper[4756]: I0930 21:02:42.428895 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f6fcc4bd8-b42zh" Sep 30 21:02:42 crc kubenswrapper[4756]: I0930 21:02:42.430635 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f6fcc4bd8-b42zh" Sep 30 21:03:00 crc kubenswrapper[4756]: E0930 21:03:00.059810 4756 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.12:37836->38.102.83.12:35835: write tcp 38.102.83.12:37836->38.102.83.12:35835: write: broken pipe Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.321789 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-pg6bc"] Sep 30 21:03:03 crc kubenswrapper[4756]: E0930 21:03:03.322702 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="init" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.322717 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="init" Sep 30 21:03:03 crc kubenswrapper[4756]: E0930 21:03:03.322731 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="dnsmasq-dns" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.322741 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="dnsmasq-dns" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.322961 4756 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e9cf1b3d-95ca-4954-9482-fda5d81d2a1f" containerName="dnsmasq-dns" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.323712 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.333600 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pg6bc"] Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.401758 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jsrcc"] Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.404884 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.409212 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jsrcc"] Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.495692 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzwmr\" (UniqueName: \"kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr\") pod \"nova-api-db-create-pg6bc\" (UID: \"eee9c0c3-6721-41ad-a547-cceea6c853b5\") " pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.509642 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-st8q8"] Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.510850 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.516516 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-st8q8"] Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.599113 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkbxl\" (UniqueName: \"kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl\") pod \"nova-cell0-db-create-jsrcc\" (UID: \"4e782094-bf7e-4670-ae47-42a92bb17066\") " pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.599176 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzwmr\" (UniqueName: \"kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr\") pod \"nova-api-db-create-pg6bc\" (UID: \"eee9c0c3-6721-41ad-a547-cceea6c853b5\") " pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.619012 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzwmr\" (UniqueName: \"kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr\") pod \"nova-api-db-create-pg6bc\" (UID: \"eee9c0c3-6721-41ad-a547-cceea6c853b5\") " pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.645845 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.701268 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkbxl\" (UniqueName: \"kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl\") pod \"nova-cell0-db-create-jsrcc\" (UID: \"4e782094-bf7e-4670-ae47-42a92bb17066\") " pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.701388 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrftj\" (UniqueName: \"kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj\") pod \"nova-cell1-db-create-st8q8\" (UID: \"8e05b932-7161-43ea-a0cc-0fb87c3e0518\") " pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.720561 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkbxl\" (UniqueName: \"kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl\") pod \"nova-cell0-db-create-jsrcc\" (UID: \"4e782094-bf7e-4670-ae47-42a92bb17066\") " pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.731441 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.802943 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrftj\" (UniqueName: \"kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj\") pod \"nova-cell1-db-create-st8q8\" (UID: \"8e05b932-7161-43ea-a0cc-0fb87c3e0518\") " pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.822012 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrftj\" (UniqueName: \"kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj\") pod \"nova-cell1-db-create-st8q8\" (UID: \"8e05b932-7161-43ea-a0cc-0fb87c3e0518\") " pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:03 crc kubenswrapper[4756]: I0930 21:03:03.833639 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.184502 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pg6bc"] Sep 30 21:03:04 crc kubenswrapper[4756]: W0930 21:03:04.189227 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeee9c0c3_6721_41ad_a547_cceea6c853b5.slice/crio-63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd WatchSource:0}: Error finding container 63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd: Status 404 returned error can't find the container with id 63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd Sep 30 21:03:04 crc kubenswrapper[4756]: W0930 21:03:04.256080 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e782094_bf7e_4670_ae47_42a92bb17066.slice/crio-c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028 WatchSource:0}: Error finding container c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028: Status 404 returned error can't find the container with id c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028 Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.257451 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jsrcc"] Sep 30 21:03:04 crc kubenswrapper[4756]: W0930 21:03:04.338410 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e05b932_7161_43ea_a0cc_0fb87c3e0518.slice/crio-e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f WatchSource:0}: Error finding container e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f: Status 404 returned error can't find the container with id e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.339923 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-st8q8"] Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.513133 4756 generic.go:334] "Generic (PLEG): container finished" podID="4e782094-bf7e-4670-ae47-42a92bb17066" containerID="17c2e6db03e4efc1e51d234dd3d5d389b910a3f8f9803a38f09c70f331aefc76" exitCode=0 Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.513196 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jsrcc" event={"ID":"4e782094-bf7e-4670-ae47-42a92bb17066","Type":"ContainerDied","Data":"17c2e6db03e4efc1e51d234dd3d5d389b910a3f8f9803a38f09c70f331aefc76"} Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.513222 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jsrcc" event={"ID":"4e782094-bf7e-4670-ae47-42a92bb17066","Type":"ContainerStarted","Data":"c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028"} Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.515612 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-st8q8" event={"ID":"8e05b932-7161-43ea-a0cc-0fb87c3e0518","Type":"ContainerStarted","Data":"e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f"} Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.517527 4756 generic.go:334] "Generic (PLEG): container finished" podID="eee9c0c3-6721-41ad-a547-cceea6c853b5" 
containerID="34ca515ffc06242358cfc7613e003721216660a5d42f6f8a0e53301976e83a72" exitCode=0 Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.517586 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pg6bc" event={"ID":"eee9c0c3-6721-41ad-a547-cceea6c853b5","Type":"ContainerDied","Data":"34ca515ffc06242358cfc7613e003721216660a5d42f6f8a0e53301976e83a72"} Sep 30 21:03:04 crc kubenswrapper[4756]: I0930 21:03:04.517619 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pg6bc" event={"ID":"eee9c0c3-6721-41ad-a547-cceea6c853b5","Type":"ContainerStarted","Data":"63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd"} Sep 30 21:03:05 crc kubenswrapper[4756]: I0930 21:03:05.538360 4756 generic.go:334] "Generic (PLEG): container finished" podID="8e05b932-7161-43ea-a0cc-0fb87c3e0518" containerID="01072dd17822e98196499fcbfee9bc9575306997c9eaffc0bf463433e52a7147" exitCode=0 Sep 30 21:03:05 crc kubenswrapper[4756]: I0930 21:03:05.538422 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-st8q8" event={"ID":"8e05b932-7161-43ea-a0cc-0fb87c3e0518","Type":"ContainerDied","Data":"01072dd17822e98196499fcbfee9bc9575306997c9eaffc0bf463433e52a7147"} Sep 30 21:03:05 crc kubenswrapper[4756]: I0930 21:03:05.956303 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:05 crc kubenswrapper[4756]: I0930 21:03:05.961781 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.140901 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkbxl\" (UniqueName: \"kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl\") pod \"4e782094-bf7e-4670-ae47-42a92bb17066\" (UID: \"4e782094-bf7e-4670-ae47-42a92bb17066\") " Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.141066 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzwmr\" (UniqueName: \"kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr\") pod \"eee9c0c3-6721-41ad-a547-cceea6c853b5\" (UID: \"eee9c0c3-6721-41ad-a547-cceea6c853b5\") " Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.150037 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl" (OuterVolumeSpecName: "kube-api-access-hkbxl") pod "4e782094-bf7e-4670-ae47-42a92bb17066" (UID: "4e782094-bf7e-4670-ae47-42a92bb17066"). InnerVolumeSpecName "kube-api-access-hkbxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.151438 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr" (OuterVolumeSpecName: "kube-api-access-jzwmr") pod "eee9c0c3-6721-41ad-a547-cceea6c853b5" (UID: "eee9c0c3-6721-41ad-a547-cceea6c853b5"). InnerVolumeSpecName "kube-api-access-jzwmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.245438 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkbxl\" (UniqueName: \"kubernetes.io/projected/4e782094-bf7e-4670-ae47-42a92bb17066-kube-api-access-hkbxl\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.245504 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzwmr\" (UniqueName: \"kubernetes.io/projected/eee9c0c3-6721-41ad-a547-cceea6c853b5-kube-api-access-jzwmr\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.548828 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pg6bc" event={"ID":"eee9c0c3-6721-41ad-a547-cceea6c853b5","Type":"ContainerDied","Data":"63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd"} Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.549512 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63c807eea0d162e8a127423155f8c6465f2adec58dd9230fd315d1bc2a4659dd" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.548850 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pg6bc" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.551186 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jsrcc" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.555483 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jsrcc" event={"ID":"4e782094-bf7e-4670-ae47-42a92bb17066","Type":"ContainerDied","Data":"c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028"} Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.555535 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9a78545fd3fa62f12838c1d4de8d0575ceff12994d9e6a5cd070c4cf6c4f028" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.833054 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.960706 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrftj\" (UniqueName: \"kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj\") pod \"8e05b932-7161-43ea-a0cc-0fb87c3e0518\" (UID: \"8e05b932-7161-43ea-a0cc-0fb87c3e0518\") " Sep 30 21:03:06 crc kubenswrapper[4756]: I0930 21:03:06.966891 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj" (OuterVolumeSpecName: "kube-api-access-qrftj") pod "8e05b932-7161-43ea-a0cc-0fb87c3e0518" (UID: "8e05b932-7161-43ea-a0cc-0fb87c3e0518"). InnerVolumeSpecName "kube-api-access-qrftj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:07 crc kubenswrapper[4756]: I0930 21:03:07.063066 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrftj\" (UniqueName: \"kubernetes.io/projected/8e05b932-7161-43ea-a0cc-0fb87c3e0518-kube-api-access-qrftj\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:07 crc kubenswrapper[4756]: I0930 21:03:07.564261 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-st8q8" event={"ID":"8e05b932-7161-43ea-a0cc-0fb87c3e0518","Type":"ContainerDied","Data":"e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f"} Sep 30 21:03:07 crc kubenswrapper[4756]: I0930 21:03:07.564427 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3b389d8d38c5025ac1e51298f07902e4e4d437dbaf2c6d4b8f7e346021a8c6f" Sep 30 21:03:07 crc kubenswrapper[4756]: I0930 21:03:07.564538 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-st8q8" Sep 30 21:03:11 crc kubenswrapper[4756]: I0930 21:03:11.539103 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:03:11 crc kubenswrapper[4756]: I0930 21:03:11.539920 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:03:11 crc kubenswrapper[4756]: I0930 21:03:11.539984 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 21:03:11 crc kubenswrapper[4756]: I0930 21:03:11.541202 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:03:11 crc kubenswrapper[4756]: I0930 21:03:11.541327 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae" gracePeriod=600 Sep 30 21:03:12 crc kubenswrapper[4756]: I0930 21:03:12.629209 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae" exitCode=0 Sep 30 21:03:12 crc kubenswrapper[4756]: I0930 21:03:12.629341 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae"} Sep 30 21:03:12 crc kubenswrapper[4756]: I0930 21:03:12.629863 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"} Sep 30 21:03:12 crc kubenswrapper[4756]: I0930 21:03:12.629898 4756 scope.go:117] "RemoveContainer" containerID="47efbbc76271cd44fa05dca3e26855fb46918fbc5a3a511e482c89c79b8cb60c" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.535619 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5f75-account-create-66wxn"] Sep 30 21:03:13 crc kubenswrapper[4756]: E0930 21:03:13.536215 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eee9c0c3-6721-41ad-a547-cceea6c853b5" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536228 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="eee9c0c3-6721-41ad-a547-cceea6c853b5" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: E0930 21:03:13.536242 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e782094-bf7e-4670-ae47-42a92bb17066" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536248 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e782094-bf7e-4670-ae47-42a92bb17066" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: E0930 21:03:13.536262 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e05b932-7161-43ea-a0cc-0fb87c3e0518" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536269 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e05b932-7161-43ea-a0cc-0fb87c3e0518" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536471 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e05b932-7161-43ea-a0cc-0fb87c3e0518" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536495 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="eee9c0c3-6721-41ad-a547-cceea6c853b5" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.536505 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e782094-bf7e-4670-ae47-42a92bb17066" containerName="mariadb-database-create" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.537012 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.539063 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.552045 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f75-account-create-66wxn"] Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.590764 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qw62\" (UniqueName: \"kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62\") pod \"nova-api-5f75-account-create-66wxn\" (UID: \"6ed2348c-6c17-4c81-a72f-eba6b032f566\") " pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.692825 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qw62\" (UniqueName: \"kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62\") pod \"nova-api-5f75-account-create-66wxn\" (UID: \"6ed2348c-6c17-4c81-a72f-eba6b032f566\") " pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.710905 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qw62\" (UniqueName: \"kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62\") pod \"nova-api-5f75-account-create-66wxn\" (UID: \"6ed2348c-6c17-4c81-a72f-eba6b032f566\") " pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.746788 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-c790-account-create-p82f9"] Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.748592 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.750667 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.757090 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-c790-account-create-p82f9"] Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.793876 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc6dd\" (UniqueName: \"kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd\") pod \"nova-cell0-c790-account-create-p82f9\" (UID: \"c67011a6-d3f0-4ce8-b8de-3372650df4a2\") " pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.861684 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.900295 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc6dd\" (UniqueName: \"kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd\") pod \"nova-cell0-c790-account-create-p82f9\" (UID: \"c67011a6-d3f0-4ce8-b8de-3372650df4a2\") " pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.921119 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc6dd\" (UniqueName: \"kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd\") pod \"nova-cell0-c790-account-create-p82f9\" (UID: \"c67011a6-d3f0-4ce8-b8de-3372650df4a2\") " pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.942874 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-00ab-account-create-qzmg6"] Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.943963 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.947208 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 21:03:13 crc kubenswrapper[4756]: I0930 21:03:13.955619 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-00ab-account-create-qzmg6"] Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.090359 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.105279 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz726\" (UniqueName: \"kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726\") pod \"nova-cell1-00ab-account-create-qzmg6\" (UID: \"61039609-1019-4e9c-9aeb-24d37151f9d6\") " pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.217332 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz726\" (UniqueName: \"kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726\") pod \"nova-cell1-00ab-account-create-qzmg6\" (UID: \"61039609-1019-4e9c-9aeb-24d37151f9d6\") " pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.240973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz726\" (UniqueName: \"kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726\") pod \"nova-cell1-00ab-account-create-qzmg6\" (UID: \"61039609-1019-4e9c-9aeb-24d37151f9d6\") " pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.314802 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.489809 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f75-account-create-66wxn"] Sep 30 21:03:14 crc kubenswrapper[4756]: W0930 21:03:14.499261 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ed2348c_6c17_4c81_a72f_eba6b032f566.slice/crio-a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08 WatchSource:0}: Error finding container a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08: Status 404 returned error can't find the container with id a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08 Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.616820 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-c790-account-create-p82f9"] Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.652058 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f75-account-create-66wxn" event={"ID":"6ed2348c-6c17-4c81-a72f-eba6b032f566","Type":"ContainerStarted","Data":"a84a3a34d1828508c86f88dbac5b79d7c18d102bc693414081a1f91e7a159dc9"} Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.652098 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f75-account-create-66wxn" event={"ID":"6ed2348c-6c17-4c81-a72f-eba6b032f566","Type":"ContainerStarted","Data":"a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08"} Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.654135 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c790-account-create-p82f9" event={"ID":"c67011a6-d3f0-4ce8-b8de-3372650df4a2","Type":"ContainerStarted","Data":"25a3f6cf4ffe9d0bf6df1ec20112b9537e2177aebd6ffe683601f9be79984d85"} Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.680609 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-5f75-account-create-66wxn" podStartSLOduration=1.680578155 podStartE2EDuration="1.680578155s" podCreationTimestamp="2025-09-30 21:03:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:14.671208681 +0000 UTC m=+5524.292142158" watchObservedRunningTime="2025-09-30 21:03:14.680578155 +0000 UTC m=+5524.301511632" Sep 30 21:03:14 crc kubenswrapper[4756]: I0930 21:03:14.715195 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-00ab-account-create-qzmg6"] Sep 30 21:03:14 crc kubenswrapper[4756]: W0930 21:03:14.717300 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61039609_1019_4e9c_9aeb_24d37151f9d6.slice/crio-7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131 WatchSource:0}: Error finding container 7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131: Status 404 returned error can't find the container with id 7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131 Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.667127 4756 generic.go:334] "Generic (PLEG): container finished" podID="c67011a6-d3f0-4ce8-b8de-3372650df4a2" containerID="a22f1622dcfd04bf5db7bcd1135473d970103cabfcf897fae357586540d14479" exitCode=0 Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.667190 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c790-account-create-p82f9" event={"ID":"c67011a6-d3f0-4ce8-b8de-3372650df4a2","Type":"ContainerDied","Data":"a22f1622dcfd04bf5db7bcd1135473d970103cabfcf897fae357586540d14479"} Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.670630 4756 generic.go:334] "Generic (PLEG): container finished" podID="61039609-1019-4e9c-9aeb-24d37151f9d6" containerID="310f89380ae9046cb077ef97b6c26621edbdaed42ecb026128dc9b788c43e333" exitCode=0 Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.670811 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-00ab-account-create-qzmg6" event={"ID":"61039609-1019-4e9c-9aeb-24d37151f9d6","Type":"ContainerDied","Data":"310f89380ae9046cb077ef97b6c26621edbdaed42ecb026128dc9b788c43e333"} Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.670863 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-00ab-account-create-qzmg6" event={"ID":"61039609-1019-4e9c-9aeb-24d37151f9d6","Type":"ContainerStarted","Data":"7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131"} Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.677192 4756 generic.go:334] "Generic (PLEG): container finished" podID="6ed2348c-6c17-4c81-a72f-eba6b032f566" containerID="a84a3a34d1828508c86f88dbac5b79d7c18d102bc693414081a1f91e7a159dc9" exitCode=0 Sep 30 21:03:15 crc kubenswrapper[4756]: I0930 21:03:15.677241 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f75-account-create-66wxn" event={"ID":"6ed2348c-6c17-4c81-a72f-eba6b032f566","Type":"ContainerDied","Data":"a84a3a34d1828508c86f88dbac5b79d7c18d102bc693414081a1f91e7a159dc9"} Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.120673 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.122593 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.130542 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-c790-account-create-p82f9" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.175800 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qw62\" (UniqueName: \"kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62\") pod \"6ed2348c-6c17-4c81-a72f-eba6b032f566\" (UID: \"6ed2348c-6c17-4c81-a72f-eba6b032f566\") " Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.175856 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc6dd\" (UniqueName: \"kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd\") pod \"c67011a6-d3f0-4ce8-b8de-3372650df4a2\" (UID: \"c67011a6-d3f0-4ce8-b8de-3372650df4a2\") " Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.175902 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz726\" (UniqueName: \"kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726\") pod \"61039609-1019-4e9c-9aeb-24d37151f9d6\" (UID: \"61039609-1019-4e9c-9aeb-24d37151f9d6\") " Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.182284 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd" (OuterVolumeSpecName: "kube-api-access-nc6dd") pod "c67011a6-d3f0-4ce8-b8de-3372650df4a2" (UID: "c67011a6-d3f0-4ce8-b8de-3372650df4a2"). InnerVolumeSpecName "kube-api-access-nc6dd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.182566 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62" (OuterVolumeSpecName: "kube-api-access-2qw62") pod "6ed2348c-6c17-4c81-a72f-eba6b032f566" (UID: "6ed2348c-6c17-4c81-a72f-eba6b032f566"). InnerVolumeSpecName "kube-api-access-2qw62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.182790 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726" (OuterVolumeSpecName: "kube-api-access-nz726") pod "61039609-1019-4e9c-9aeb-24d37151f9d6" (UID: "61039609-1019-4e9c-9aeb-24d37151f9d6"). InnerVolumeSpecName "kube-api-access-nz726". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.277461 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qw62\" (UniqueName: \"kubernetes.io/projected/6ed2348c-6c17-4c81-a72f-eba6b032f566-kube-api-access-2qw62\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.277597 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc6dd\" (UniqueName: \"kubernetes.io/projected/c67011a6-d3f0-4ce8-b8de-3372650df4a2-kube-api-access-nc6dd\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.277608 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz726\" (UniqueName: \"kubernetes.io/projected/61039609-1019-4e9c-9aeb-24d37151f9d6-kube-api-access-nz726\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.711487 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-00ab-account-create-qzmg6" event={"ID":"61039609-1019-4e9c-9aeb-24d37151f9d6","Type":"ContainerDied","Data":"7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131"} Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.711805 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e23a0a02abb69bbde66da1c88dfdc6d8fecd95db26362c603542e836a719131" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.711615 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-00ab-account-create-qzmg6" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.715185 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f75-account-create-66wxn" event={"ID":"6ed2348c-6c17-4c81-a72f-eba6b032f566","Type":"ContainerDied","Data":"a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08"} Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.715227 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a526e411ab9e650ed8eef5096ed388a00d8e1616f860e8dccd62d8ec1a016f08" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.715198 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f75-account-create-66wxn" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.717414 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c790-account-create-p82f9" event={"ID":"c67011a6-d3f0-4ce8-b8de-3372650df4a2","Type":"ContainerDied","Data":"25a3f6cf4ffe9d0bf6df1ec20112b9537e2177aebd6ffe683601f9be79984d85"} Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.717452 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25a3f6cf4ffe9d0bf6df1ec20112b9537e2177aebd6ffe683601f9be79984d85" Sep 30 21:03:17 crc kubenswrapper[4756]: I0930 21:03:17.717504 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-c790-account-create-p82f9"
Sep 30 21:03:18 crc kubenswrapper[4756]: I0930 21:03:18.999304 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xsk5l"]
Sep 30 21:03:19 crc kubenswrapper[4756]: E0930 21:03:19.000412 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61039609-1019-4e9c-9aeb-24d37151f9d6" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.000505 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="61039609-1019-4e9c-9aeb-24d37151f9d6" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: E0930 21:03:19.000603 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed2348c-6c17-4c81-a72f-eba6b032f566" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.000696 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed2348c-6c17-4c81-a72f-eba6b032f566" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: E0930 21:03:19.000776 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c67011a6-d3f0-4ce8-b8de-3372650df4a2" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.000840 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c67011a6-d3f0-4ce8-b8de-3372650df4a2" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.001124 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="61039609-1019-4e9c-9aeb-24d37151f9d6" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.001222 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ed2348c-6c17-4c81-a72f-eba6b032f566" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.001305 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c67011a6-d3f0-4ce8-b8de-3372650df4a2" containerName="mariadb-account-create"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.002080 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.004331 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6krg5"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.004502 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.010708 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.023559 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xsk5l"]
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.112736 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.112853 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.112870 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.113554 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppzrn\" (UniqueName: \"kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.215685 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.215833 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.215852 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.215876 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppzrn\" (UniqueName: \"kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.221040 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.223013 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.225467 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.234256 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppzrn\" (UniqueName: \"kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn\") pod \"nova-cell0-conductor-db-sync-xsk5l\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") " pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.319502 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:19 crc kubenswrapper[4756]: I0930 21:03:19.771226 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xsk5l"]
Sep 30 21:03:19 crc kubenswrapper[4756]: W0930 21:03:19.786565 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod134e5ca4_9f84_4f3f_8f8e_6b6dc7953308.slice/crio-ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726 WatchSource:0}: Error finding container ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726: Status 404 returned error can't find the container with id ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726
Sep 30 21:03:20 crc kubenswrapper[4756]: I0930 21:03:20.752127 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xsk5l" event={"ID":"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308","Type":"ContainerStarted","Data":"91b0e4a91df6435eebf9199d7a8bfff9d646e32b50218937b93ef02c4b8f0288"}
Sep 30 21:03:20 crc kubenswrapper[4756]: I0930 21:03:20.753440 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xsk5l" event={"ID":"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308","Type":"ContainerStarted","Data":"ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726"}
Sep 30 21:03:20 crc kubenswrapper[4756]: I0930 21:03:20.766718 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-xsk5l" podStartSLOduration=2.766701875 podStartE2EDuration="2.766701875s" podCreationTimestamp="2025-09-30 21:03:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:20.765144864 +0000 UTC m=+5530.386078341" watchObservedRunningTime="2025-09-30 21:03:20.766701875 +0000 UTC m=+5530.387635352"
Sep 30 21:03:21 crc kubenswrapper[4756]: I0930 21:03:21.160813 4756 scope.go:117] "RemoveContainer" containerID="d1c3222dedb34463fafc99c69f7ce1e3e59c38d7e972c4c1d34eefe7278b9632"
Sep 30 21:03:25 crc kubenswrapper[4756]: I0930 21:03:25.810261 4756 generic.go:334] "Generic (PLEG): container finished" podID="134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" containerID="91b0e4a91df6435eebf9199d7a8bfff9d646e32b50218937b93ef02c4b8f0288" exitCode=0
Sep 30 21:03:25 crc kubenswrapper[4756]: I0930 21:03:25.810438 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xsk5l" event={"ID":"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308","Type":"ContainerDied","Data":"91b0e4a91df6435eebf9199d7a8bfff9d646e32b50218937b93ef02c4b8f0288"}
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.199881 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.363325 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle\") pod \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") "
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.363792 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppzrn\" (UniqueName: \"kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn\") pod \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") "
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.363982 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data\") pod \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") "
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.364130 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts\") pod \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\" (UID: \"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308\") "
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.369051 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn" (OuterVolumeSpecName: "kube-api-access-ppzrn") pod "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" (UID: "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308"). InnerVolumeSpecName "kube-api-access-ppzrn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.374597 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts" (OuterVolumeSpecName: "scripts") pod "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" (UID: "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.386866 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" (UID: "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.406743 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data" (OuterVolumeSpecName: "config-data") pod "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" (UID: "134e5ca4-9f84-4f3f-8f8e-6b6dc7953308"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.467124 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.467178 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.467197 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.467232 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppzrn\" (UniqueName: \"kubernetes.io/projected/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308-kube-api-access-ppzrn\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.832424 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xsk5l" event={"ID":"134e5ca4-9f84-4f3f-8f8e-6b6dc7953308","Type":"ContainerDied","Data":"ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726"}
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.832468 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba26ac2c91f5b6a609a675aa9355a380f853a6050069984d97d7a7d905838726"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.832785 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xsk5l"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.905991 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 30 21:03:27 crc kubenswrapper[4756]: E0930 21:03:27.906352 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" containerName="nova-cell0-conductor-db-sync"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.906370 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" containerName="nova-cell0-conductor-db-sync"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.906532 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" containerName="nova-cell0-conductor-db-sync"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.907062 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.909311 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.909635 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6krg5"
Sep 30 21:03:27 crc kubenswrapper[4756]: I0930 21:03:27.918575 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.077290 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.077381 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.077723 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf7sq\" (UniqueName: \"kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.180197 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.180282 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.180386 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf7sq\" (UniqueName: \"kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.187265 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.187357 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.198011 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf7sq\" (UniqueName: \"kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq\") pod \"nova-cell0-conductor-0\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.221994 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.521697 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.841458 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ca6b90e2-e28e-41e7-8029-58917d24e5c1","Type":"ContainerStarted","Data":"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff"}
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.841769 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ca6b90e2-e28e-41e7-8029-58917d24e5c1","Type":"ContainerStarted","Data":"3eeb074521fb16396db4828f04ed80c50e115fbbb0c012eeb58829d420e1a736"}
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.841911 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:28 crc kubenswrapper[4756]: I0930 21:03:28.860555 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.860530189 podStartE2EDuration="1.860530189s" podCreationTimestamp="2025-09-30 21:03:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:28.858912327 +0000 UTC m=+5538.479845814" watchObservedRunningTime="2025-09-30 21:03:28.860530189 +0000 UTC m=+5538.481463686"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.259021 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.808708 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-g7jd6"]
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.810072 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.817052 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.823015 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.825468 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g7jd6"]
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.894705 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8p8g\" (UniqueName: \"kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.894870 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.894923 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.895044 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.988199 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.989619 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.996816 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.998891 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8p8g\" (UniqueName: \"kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.998958 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.998990 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:33 crc kubenswrapper[4756]: I0930 21:03:33.999037 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.018788 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.019051 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.019915 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.020485 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.028142 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.028711 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.042165 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.050913 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8p8g\" (UniqueName: \"kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g\") pod \"nova-cell0-cell-mapping-g7jd6\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.054447 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.076794 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.087615 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.097999 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.100812 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgdxt\" (UniqueName: \"kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.100858 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfkq5\" (UniqueName: \"kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.100880 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.100910 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.100937 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.101013 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.101031 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.121126 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.138832 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g7jd6"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.165298 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.166596 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.178777 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.190597 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202742 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202806 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202834 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202854 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202925 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbzcm\" (UniqueName: \"kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.202966 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgdxt\" (UniqueName: \"kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.203042 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfkq5\" (UniqueName: \"kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.203073 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.203121 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.203158 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.203226 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.209104 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.209631 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.212631 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.212937 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.216154 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.234944 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgdxt\" (UniqueName: \"kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt\") pod \"nova-cell1-novncproxy-0\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.237864 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfkq5\" (UniqueName: \"kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5\") pod \"nova-api-0\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.248245 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.249781 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.257209 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.304290 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.304872 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.304996 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4872q\" (UniqueName: \"kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305540 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305629 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305710 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305782 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305884 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.305960 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbzcm\" (UniqueName: \"kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.306042 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.306122 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb2ss\" (UniqueName: \"kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.306191 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.308045 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.319430 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.328162 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbzcm\" (UniqueName: \"kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.330528 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data\") pod \"nova-metadata-0\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.404027 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407373 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407460 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4872q\" (UniqueName: \"kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407480 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407518 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407537 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407576 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407601 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb2ss\" (UniqueName: \"kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.407623 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.408815 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.408988 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.409033 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.412315 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.414898 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.421498 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.423518 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.428018 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb2ss\" (UniqueName: \"kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss\") pod \"nova-scheduler-0\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.431437 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4872q\" (UniqueName: \"kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q\") pod \"dnsmasq-dns-58b45b7695-v65nt\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.440528 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.482907 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.632099 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b45b7695-v65nt"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.703808 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.705891 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.723847 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.734073 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g7jd6"]
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.814190 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.814240 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64596\" (UniqueName: \"kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.814356 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.903098 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g7jd6" event={"ID":"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2","Type":"ContainerStarted","Data":"7772165d36872024a44e0cc0f5422a344123d1a843ba2eae1c6610d14801ebf8"}
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.918471 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.918532 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64596\" (UniqueName: \"kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.918649 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.919528 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.919643 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.947875 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64596\" (UniqueName: \"kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596\") pod \"redhat-operators-nx4mc\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") " pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:34 crc kubenswrapper[4756]: I0930 21:03:34.971323 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:34 crc kubenswrapper[4756]: W0930 21:03:34.997558 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae546baf_e30a_48a8_a4c1_9bc37adfbc12.slice/crio-50bc367fd9372e6233447cc9d0e0d9f65b22a02e03d458a8d20d8e63badeb309 WatchSource:0}: Error finding container 50bc367fd9372e6233447cc9d0e0d9f65b22a02e03d458a8d20d8e63badeb309: Status 404 returned error can't find the container with id 50bc367fd9372e6233447cc9d0e0d9f65b22a02e03d458a8d20d8e63badeb309
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.007443 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.044372 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.092845 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.165087 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.195496 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svqlp"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.196712 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.199316 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.199774 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.211305 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svqlp"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.329201 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"]
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.337290 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.337413 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.337479 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.337514 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5dml\" (UniqueName: \"kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.440912 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.441380 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.441547 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.441605 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5dml\" (UniqueName: \"kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.453859 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.462061 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.462220 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5dml\" (UniqueName: \"kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.462910 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data\") pod \"nova-cell1-conductor-db-sync-svqlp\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.677070 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svqlp"
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.756387 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:35 crc kubenswrapper[4756]: W0930 21:03:35.805518 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14bad369_c240_4a03_803d_b0b6c2baf689.slice/crio-240ed9f39d92476a49befb6eb53f2e77b8653c941ce2a59b7f02dc0128d6c6fd WatchSource:0}: Error finding container 240ed9f39d92476a49befb6eb53f2e77b8653c941ce2a59b7f02dc0128d6c6fd: Status 404 returned error can't find the container with id 240ed9f39d92476a49befb6eb53f2e77b8653c941ce2a59b7f02dc0128d6c6fd
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.926942 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5302e743-f6ec-4907-8de8-677e0151248b","Type":"ContainerStarted","Data":"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.926983 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5302e743-f6ec-4907-8de8-677e0151248b","Type":"ContainerStarted","Data":"6c224288cb2230b2586ef646b92bf7c7842985d5c94277617cc4731cc90d59e3"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.929144 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerStarted","Data":"240ed9f39d92476a49befb6eb53f2e77b8653c941ce2a59b7f02dc0128d6c6fd"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.929824 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" event={"ID":"e093a2b1-f219-40f0-9abc-bb97939f235a","Type":"ContainerStarted","Data":"d6abefba6282900c444f788efdcfbc1fd514dd34dbb973884f5f8ab8a328fb20"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.945508 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerStarted","Data":"9d38d34410f17ee87eced371c79f9c1f64f2bae3527855f9dcc38af11b4ef403"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.945764 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerStarted","Data":"2814d227300e9223a0fa70447c4ab1db3ea75ebee035e28f68e2c171ad1fb5b3"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.949794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g7jd6" event={"ID":"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2","Type":"ContainerStarted","Data":"6aaed6b894650103f5ab222d89bb8c8ef15da0085a383ae9eb3fa8d7f0e78b53"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.951201 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"af618cfb-1960-4d7b-a513-48dcbe0a7ddd","Type":"ContainerStarted","Data":"b4f178050b95397b0936c2387eae8ce4a218128e2c5bc238ad5f93d9cdfab028"}
Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.951242 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0"
event={"ID":"af618cfb-1960-4d7b-a513-48dcbe0a7ddd","Type":"ContainerStarted","Data":"b48077e2640e1f83ad61ae391cf5c1a6496ff81cfb758649484098c6e3f34fd6"} Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.953201 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerStarted","Data":"35042716572a05f228b7bd000ab941a7f8400fb6a598d98b79c9821a5191a6f6"} Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.953229 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerStarted","Data":"37bde90c677de0db519757c57e6b0e40f4d5e43c77304d38b597bca75cdb357f"} Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.953237 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerStarted","Data":"50bc367fd9372e6233447cc9d0e0d9f65b22a02e03d458a8d20d8e63badeb309"} Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.976152 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.9761384990000002 podStartE2EDuration="1.976138499s" podCreationTimestamp="2025-09-30 21:03:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:35.957598526 +0000 UTC m=+5545.578532003" watchObservedRunningTime="2025-09-30 21:03:35.976138499 +0000 UTC m=+5545.597071976" Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.977425 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.977420033 podStartE2EDuration="2.977420033s" podCreationTimestamp="2025-09-30 21:03:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:35.975630636 +0000 UTC m=+5545.596564113" watchObservedRunningTime="2025-09-30 21:03:35.977420033 +0000 UTC m=+5545.598353510" Sep 30 21:03:35 crc kubenswrapper[4756]: I0930 21:03:35.997300 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-g7jd6" podStartSLOduration=2.9972827410000003 podStartE2EDuration="2.997282741s" podCreationTimestamp="2025-09-30 21:03:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:35.994073107 +0000 UTC m=+5545.615006584" watchObservedRunningTime="2025-09-30 21:03:35.997282741 +0000 UTC m=+5545.618216218" Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.038377 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.038358952 podStartE2EDuration="3.038358952s" podCreationTimestamp="2025-09-30 21:03:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:36.036448002 +0000 UTC m=+5545.657381489" watchObservedRunningTime="2025-09-30 21:03:36.038358952 +0000 UTC m=+5545.659292419" Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.272299 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svqlp"] Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.963201 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svqlp" event={"ID":"4bb17af9-3b2c-4719-b35b-349cd56624f2","Type":"ContainerStarted","Data":"39690dbc215f423057d4ee5db6901f534d08ef585a8c5ad48b9dbadb32db422b"} Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.964433 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svqlp" event={"ID":"4bb17af9-3b2c-4719-b35b-349cd56624f2","Type":"ContainerStarted","Data":"ec292f6a742b7612513651af461f4ed6605d2eea07745d96921292a89cc3e01e"} Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.966248 4756 generic.go:334] "Generic (PLEG): container finished" podID="14bad369-c240-4a03-803d-b0b6c2baf689" containerID="59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3" exitCode=0 Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.966312 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerDied","Data":"59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3"} Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.969385 4756 generic.go:334] "Generic (PLEG): container finished" podID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerID="72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d" exitCode=0 Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.969466 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" event={"ID":"e093a2b1-f219-40f0-9abc-bb97939f235a","Type":"ContainerDied","Data":"72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d"} Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.977193 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerStarted","Data":"7274ccdcee64c280a34806a4e622a546fe8c3373a9ef2a760aa6a95437e419db"} Sep 30 21:03:36 crc kubenswrapper[4756]: I0930 21:03:36.993930 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-svqlp" podStartSLOduration=1.993913074 podStartE2EDuration="1.993913074s" podCreationTimestamp="2025-09-30 21:03:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:36.986900482 +0000 UTC m=+5546.607833959" watchObservedRunningTime="2025-09-30 21:03:36.993913074 +0000 UTC m=+5546.614846551" Sep 30 21:03:37 crc kubenswrapper[4756]: I0930 21:03:37.007776 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.007760026 podStartE2EDuration="4.007760026s" podCreationTimestamp="2025-09-30 21:03:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:37.003880654 +0000 UTC m=+5546.624814131" watchObservedRunningTime="2025-09-30 21:03:37.007760026 +0000 UTC m=+5546.628693503" Sep 30 21:03:37 crc kubenswrapper[4756]: I0930 21:03:37.986143 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerStarted","Data":"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"} Sep 30 21:03:37 crc kubenswrapper[4756]: I0930 21:03:37.994310 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" event={"ID":"e093a2b1-f219-40f0-9abc-bb97939f235a","Type":"ContainerStarted","Data":"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3"} Sep 30 21:03:37 crc kubenswrapper[4756]: I0930 21:03:37.994357 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" Sep 30 21:03:38 crc kubenswrapper[4756]: I0930 21:03:38.032768 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" podStartSLOduration=4.03275178 podStartE2EDuration="4.03275178s" podCreationTimestamp="2025-09-30 21:03:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:38.031243591 +0000 UTC m=+5547.652177068" watchObservedRunningTime="2025-09-30 21:03:38.03275178 +0000 UTC m=+5547.653685257" Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.004499 4756 generic.go:334] "Generic (PLEG): container finished" podID="14bad369-c240-4a03-803d-b0b6c2baf689" containerID="a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308" exitCode=0 Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.004633 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerDied","Data":"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"} Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.422138 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.442029 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.442177 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:03:39 crc kubenswrapper[4756]: I0930 21:03:39.484340 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 21:03:40 crc kubenswrapper[4756]: I0930 21:03:40.018992 4756 generic.go:334] "Generic (PLEG): container finished" podID="4bb17af9-3b2c-4719-b35b-349cd56624f2" containerID="39690dbc215f423057d4ee5db6901f534d08ef585a8c5ad48b9dbadb32db422b" exitCode=0 Sep 30 21:03:40 crc kubenswrapper[4756]: I0930 21:03:40.019327 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svqlp" event={"ID":"4bb17af9-3b2c-4719-b35b-349cd56624f2","Type":"ContainerDied","Data":"39690dbc215f423057d4ee5db6901f534d08ef585a8c5ad48b9dbadb32db422b"} Sep 30 21:03:40 crc kubenswrapper[4756]: I0930 21:03:40.029947 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerStarted","Data":"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"} Sep 30 21:03:40 crc kubenswrapper[4756]: I0930 21:03:40.068221 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nx4mc" podStartSLOduration=3.615270721 podStartE2EDuration="6.06820431s" podCreationTimestamp="2025-09-30 21:03:34 +0000 UTC" firstStartedPulling="2025-09-30 21:03:36.968712777 +0000 UTC m=+5546.589646264" lastFinishedPulling="2025-09-30 
21:03:39.421646366 +0000 UTC m=+5549.042579853" observedRunningTime="2025-09-30 21:03:40.061039103 +0000 UTC m=+5549.681972580" watchObservedRunningTime="2025-09-30 21:03:40.06820431 +0000 UTC m=+5549.689137777" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.042368 4756 generic.go:334] "Generic (PLEG): container finished" podID="1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" containerID="6aaed6b894650103f5ab222d89bb8c8ef15da0085a383ae9eb3fa8d7f0e78b53" exitCode=0 Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.043541 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g7jd6" event={"ID":"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2","Type":"ContainerDied","Data":"6aaed6b894650103f5ab222d89bb8c8ef15da0085a383ae9eb3fa8d7f0e78b53"} Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.493608 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svqlp" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.561353 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle\") pod \"4bb17af9-3b2c-4719-b35b-349cd56624f2\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.561455 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5dml\" (UniqueName: \"kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml\") pod \"4bb17af9-3b2c-4719-b35b-349cd56624f2\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.561680 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts\") pod \"4bb17af9-3b2c-4719-b35b-349cd56624f2\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.561879 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data\") pod \"4bb17af9-3b2c-4719-b35b-349cd56624f2\" (UID: \"4bb17af9-3b2c-4719-b35b-349cd56624f2\") " Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.567527 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts" (OuterVolumeSpecName: "scripts") pod "4bb17af9-3b2c-4719-b35b-349cd56624f2" (UID: "4bb17af9-3b2c-4719-b35b-349cd56624f2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.568543 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml" (OuterVolumeSpecName: "kube-api-access-m5dml") pod "4bb17af9-3b2c-4719-b35b-349cd56624f2" (UID: "4bb17af9-3b2c-4719-b35b-349cd56624f2"). InnerVolumeSpecName "kube-api-access-m5dml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.588050 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data" (OuterVolumeSpecName: "config-data") pod "4bb17af9-3b2c-4719-b35b-349cd56624f2" (UID: "4bb17af9-3b2c-4719-b35b-349cd56624f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.607032 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bb17af9-3b2c-4719-b35b-349cd56624f2" (UID: "4bb17af9-3b2c-4719-b35b-349cd56624f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.665336 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.665418 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5dml\" (UniqueName: \"kubernetes.io/projected/4bb17af9-3b2c-4719-b35b-349cd56624f2-kube-api-access-m5dml\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.665433 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:41 crc kubenswrapper[4756]: I0930 21:03:41.665446 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb17af9-3b2c-4719-b35b-349cd56624f2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.055540 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svqlp" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.055531 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svqlp" event={"ID":"4bb17af9-3b2c-4719-b35b-349cd56624f2","Type":"ContainerDied","Data":"ec292f6a742b7612513651af461f4ed6605d2eea07745d96921292a89cc3e01e"} Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.055724 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec292f6a742b7612513651af461f4ed6605d2eea07745d96921292a89cc3e01e" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.195223 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:03:42 crc kubenswrapper[4756]: E0930 21:03:42.203972 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb17af9-3b2c-4719-b35b-349cd56624f2" containerName="nova-cell1-conductor-db-sync" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.204003 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb17af9-3b2c-4719-b35b-349cd56624f2" containerName="nova-cell1-conductor-db-sync" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.204599 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb17af9-3b2c-4719-b35b-349cd56624f2" containerName="nova-cell1-conductor-db-sync" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.205542 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.207957 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.248924 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.276702 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p77qn\" (UniqueName: \"kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.276862 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.277100 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.379126 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.379489 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p77qn\" (UniqueName: \"kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.379554 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.385060 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.390787 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.397803 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g7jd6" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.400426 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p77qn\" (UniqueName: \"kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn\") pod \"nova-cell1-conductor-0\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.481034 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts\") pod \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.481082 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data\") pod \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.481103 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle\") pod \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.481132 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8p8g\" (UniqueName: \"kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g\") pod \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\" (UID: \"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2\") " Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.484582 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g" (OuterVolumeSpecName: "kube-api-access-d8p8g") pod "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" (UID: "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2"). InnerVolumeSpecName "kube-api-access-d8p8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.493256 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts" (OuterVolumeSpecName: "scripts") pod "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" (UID: "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.507019 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" (UID: "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.508992 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data" (OuterVolumeSpecName: "config-data") pod "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" (UID: "1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.566497 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.582820 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.582849 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.582859 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:42 crc kubenswrapper[4756]: I0930 21:03:42.582870 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8p8g\" (UniqueName: \"kubernetes.io/projected/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2-kube-api-access-d8p8g\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.000088 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:03:43 crc kubenswrapper[4756]: W0930 21:03:43.000651 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcecb74a6_d43b_46e2_b423_628303002ae3.slice/crio-36f42cda1b6ab56df4091d997f0976bb6255a0984cce114c99eb09fea90feeef WatchSource:0}: Error finding container 36f42cda1b6ab56df4091d997f0976bb6255a0984cce114c99eb09fea90feeef: Status 404 returned error can't find the container with id 
36f42cda1b6ab56df4091d997f0976bb6255a0984cce114c99eb09fea90feeef Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.066514 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g7jd6" Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.066509 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g7jd6" event={"ID":"1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2","Type":"ContainerDied","Data":"7772165d36872024a44e0cc0f5422a344123d1a843ba2eae1c6610d14801ebf8"} Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.066552 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7772165d36872024a44e0cc0f5422a344123d1a843ba2eae1c6610d14801ebf8" Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.067245 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cecb74a6-d43b-46e2-b423-628303002ae3","Type":"ContainerStarted","Data":"36f42cda1b6ab56df4091d997f0976bb6255a0984cce114c99eb09fea90feeef"} Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.243330 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.243641 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-log" containerID="cri-o://37bde90c677de0db519757c57e6b0e40f4d5e43c77304d38b597bca75cdb357f" gracePeriod=30 Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.244153 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-api" containerID="cri-o://35042716572a05f228b7bd000ab941a7f8400fb6a598d98b79c9821a5191a6f6" gracePeriod=30 Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.267677 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.267904 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5302e743-f6ec-4907-8de8-677e0151248b" containerName="nova-scheduler-scheduler" containerID="cri-o://4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77" gracePeriod=30 Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.300271 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.300586 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-log" containerID="cri-o://9d38d34410f17ee87eced371c79f9c1f64f2bae3527855f9dcc38af11b4ef403" gracePeriod=30 Sep 30 21:03:43 crc kubenswrapper[4756]: I0930 21:03:43.301165 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-metadata" containerID="cri-o://7274ccdcee64c280a34806a4e622a546fe8c3373a9ef2a760aa6a95437e419db" gracePeriod=30 Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.189631 4756 generic.go:334] "Generic (PLEG): container finished" podID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerID="7274ccdcee64c280a34806a4e622a546fe8c3373a9ef2a760aa6a95437e419db" exitCode=0 
Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.189873 4756 generic.go:334] "Generic (PLEG): container finished" podID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerID="9d38d34410f17ee87eced371c79f9c1f64f2bae3527855f9dcc38af11b4ef403" exitCode=143 Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.189968 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerDied","Data":"7274ccdcee64c280a34806a4e622a546fe8c3373a9ef2a760aa6a95437e419db"} Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.189995 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerDied","Data":"9d38d34410f17ee87eced371c79f9c1f64f2bae3527855f9dcc38af11b4ef403"} Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.220559 4756 generic.go:334] "Generic (PLEG): container finished" podID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerID="35042716572a05f228b7bd000ab941a7f8400fb6a598d98b79c9821a5191a6f6" exitCode=0 Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.220588 4756 generic.go:334] "Generic (PLEG): container finished" podID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerID="37bde90c677de0db519757c57e6b0e40f4d5e43c77304d38b597bca75cdb357f" exitCode=143 Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.220640 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerDied","Data":"35042716572a05f228b7bd000ab941a7f8400fb6a598d98b79c9821a5191a6f6"} Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.220668 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerDied","Data":"37bde90c677de0db519757c57e6b0e40f4d5e43c77304d38b597bca75cdb357f"} Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.230535 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cecb74a6-d43b-46e2-b423-628303002ae3","Type":"ContainerStarted","Data":"0fdcb8d72e9fe20acbee104323b320b1e5934321e2519011362bb10d0b1fa117"} Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.231622 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.266044 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.266022337 podStartE2EDuration="2.266022337s" podCreationTimestamp="2025-09-30 21:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:44.261702084 +0000 UTC m=+5553.882635561" watchObservedRunningTime="2025-09-30 21:03:44.266022337 +0000 UTC m=+5553.886955804" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.395291 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.424123 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.437821 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.528922 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs\") pod \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.529047 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfkq5\" (UniqueName: \"kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5\") pod \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.529079 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle\") pod \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.529115 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data\") pod \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\" (UID: \"ae546baf-e30a-48a8-a4c1-9bc37adfbc12\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.529260 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs" (OuterVolumeSpecName: "logs") pod "ae546baf-e30a-48a8-a4c1-9bc37adfbc12" (UID: "ae546baf-e30a-48a8-a4c1-9bc37adfbc12"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.529719 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.541015 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5" (OuterVolumeSpecName: "kube-api-access-tfkq5") pod "ae546baf-e30a-48a8-a4c1-9bc37adfbc12" (UID: "ae546baf-e30a-48a8-a4c1-9bc37adfbc12"). InnerVolumeSpecName "kube-api-access-tfkq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.559054 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae546baf-e30a-48a8-a4c1-9bc37adfbc12" (UID: "ae546baf-e30a-48a8-a4c1-9bc37adfbc12"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.562456 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data" (OuterVolumeSpecName: "config-data") pod "ae546baf-e30a-48a8-a4c1-9bc37adfbc12" (UID: "ae546baf-e30a-48a8-a4c1-9bc37adfbc12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.622715 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.631785 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfkq5\" (UniqueName: \"kubernetes.io/projected/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-kube-api-access-tfkq5\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.631812 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.631823 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae546baf-e30a-48a8-a4c1-9bc37adfbc12-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.634433 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.695872 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"] Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.696104 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="dnsmasq-dns" containerID="cri-o://b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163" gracePeriod=10 Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.733335 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data\") pod \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.733485 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle\") pod \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.733997 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs\") pod \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\" (UID: \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.734089 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbzcm\" (UniqueName: \"kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm\") pod \"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\" (UID: 
\"1eb0c214-c7ca-4179-a52e-af13f1ce8c74\") " Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.734328 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs" (OuterVolumeSpecName: "logs") pod "1eb0c214-c7ca-4179-a52e-af13f1ce8c74" (UID: "1eb0c214-c7ca-4179-a52e-af13f1ce8c74"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.734812 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.737689 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm" (OuterVolumeSpecName: "kube-api-access-xbzcm") pod "1eb0c214-c7ca-4179-a52e-af13f1ce8c74" (UID: "1eb0c214-c7ca-4179-a52e-af13f1ce8c74"). InnerVolumeSpecName "kube-api-access-xbzcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.760812 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1eb0c214-c7ca-4179-a52e-af13f1ce8c74" (UID: "1eb0c214-c7ca-4179-a52e-af13f1ce8c74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.792379 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data" (OuterVolumeSpecName: "config-data") pod "1eb0c214-c7ca-4179-a52e-af13f1ce8c74" (UID: "1eb0c214-c7ca-4179-a52e-af13f1ce8c74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.836810 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.836837 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbzcm\" (UniqueName: \"kubernetes.io/projected/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-kube-api-access-xbzcm\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:44 crc kubenswrapper[4756]: I0930 21:03:44.836847 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb0c214-c7ca-4179-a52e-af13f1ce8c74-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.045615 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nx4mc" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.045655 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nx4mc" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.092476 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nx4mc" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.110278 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.245139 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1eb0c214-c7ca-4179-a52e-af13f1ce8c74","Type":"ContainerDied","Data":"2814d227300e9223a0fa70447c4ab1db3ea75ebee035e28f68e2c171ad1fb5b3"} Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.245191 4756 scope.go:117] "RemoveContainer" containerID="7274ccdcee64c280a34806a4e622a546fe8c3373a9ef2a760aa6a95437e419db" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.245197 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.245856 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb\") pod \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.245891 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config\") pod \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248126 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-488qq\" (UniqueName: \"kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq\") pod \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248171 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc\") pod \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248201 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb\") pod \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\" (UID: \"c60a0dcf-789e-4d15-9456-6e3e414e18b1\") " Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248265 4756 generic.go:334] "Generic (PLEG): container finished" podID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerID="b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163" exitCode=0 Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248410 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" event={"ID":"c60a0dcf-789e-4d15-9456-6e3e414e18b1","Type":"ContainerDied","Data":"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"} Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248452 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q" event={"ID":"c60a0dcf-789e-4d15-9456-6e3e414e18b1","Type":"ContainerDied","Data":"d6967756770d2370a23eb281034458feb1076c821757a6e4f736daaf33b3567c"} Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.248552 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cc6b65957-62z7q"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.251624 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.251696 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae546baf-e30a-48a8-a4c1-9bc37adfbc12","Type":"ContainerDied","Data":"50bc367fd9372e6233447cc9d0e0d9f65b22a02e03d458a8d20d8e63badeb309"}
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.253772 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq" (OuterVolumeSpecName: "kube-api-access-488qq") pod "c60a0dcf-789e-4d15-9456-6e3e414e18b1" (UID: "c60a0dcf-789e-4d15-9456-6e3e414e18b1"). InnerVolumeSpecName "kube-api-access-488qq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.274482 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.285221 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.291859 4756 scope.go:117] "RemoveContainer" containerID="9d38d34410f17ee87eced371c79f9c1f64f2bae3527855f9dcc38af11b4ef403"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.306813 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.314000 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.323981 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324319 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="init"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324334 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="init"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324346 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-api"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324353 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-api"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324365 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324371 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324382 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="dnsmasq-dns"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324388 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="dnsmasq-dns"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324425 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-metadata"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324432 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-metadata"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324445 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" containerName="nova-manage"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324452 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" containerName="nova-manage"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.324465 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324471 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324663 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324679 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" containerName="dnsmasq-dns"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324690 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-metadata"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324699 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" containerName="nova-api-api"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324708 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" containerName="nova-manage"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.324718 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" containerName="nova-metadata-log"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.325676 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.328344 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.329341 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c60a0dcf-789e-4d15-9456-6e3e414e18b1" (UID: "c60a0dcf-789e-4d15-9456-6e3e414e18b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.338143 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.338830 4756 scope.go:117] "RemoveContainer" containerID="b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.342682 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config" (OuterVolumeSpecName: "config") pod "c60a0dcf-789e-4d15-9456-6e3e414e18b1" (UID: "c60a0dcf-789e-4d15-9456-6e3e414e18b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.348787 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.351273 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-488qq\" (UniqueName: \"kubernetes.io/projected/c60a0dcf-789e-4d15-9456-6e3e414e18b1-kube-api-access-488qq\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.351306 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.351318 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-config\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.369348 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.377729 4756 scope.go:117] "RemoveContainer" containerID="63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.381037 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c60a0dcf-789e-4d15-9456-6e3e414e18b1" (UID: "c60a0dcf-789e-4d15-9456-6e3e414e18b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.384195 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.385751 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.389212 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.410563 4756 scope.go:117] "RemoveContainer" containerID="b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.411718 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163\": container with ID starting with b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163 not found: ID does not exist" containerID="b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.411788 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163"} err="failed to get container status \"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163\": rpc error: code = NotFound desc = could not find container \"b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163\": container with ID starting with b5cd482a383163dd81a4bf76899b377ec56ec2a8e22ac5dae8229bea29157163 not found: ID does not exist"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.411810 4756 scope.go:117] "RemoveContainer" containerID="63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61"
Sep 30 21:03:45 crc kubenswrapper[4756]: E0930 21:03:45.412065 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61\": container with ID starting with 63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61 not found: ID does not exist" containerID="63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.412087 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61"} err="failed to get container status \"63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61\": rpc error: code = NotFound desc = could not find container \"63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61\": container with ID starting with 63efcd988aee0ce6166f8dd15ee3000ca0d243974ecff44d6b97cd1d3d78fb61 not found: ID does not exist"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.412100 4756 scope.go:117] "RemoveContainer" containerID="35042716572a05f228b7bd000ab941a7f8400fb6a598d98b79c9821a5191a6f6"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.421660 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.422070 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c60a0dcf-789e-4d15-9456-6e3e414e18b1" (UID: "c60a0dcf-789e-4d15-9456-6e3e414e18b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452505 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpwj8\" (UniqueName: \"kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452549 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69hc5\" (UniqueName: \"kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452647 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452671 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452698 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452722 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452756 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452827 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452869 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.452880 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60a0dcf-789e-4d15-9456-6e3e414e18b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.467201 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.514148 4756 scope.go:117] "RemoveContainer" containerID="37bde90c677de0db519757c57e6b0e40f4d5e43c77304d38b597bca75cdb357f"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555100 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpwj8\" (UniqueName: \"kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555135 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69hc5\" (UniqueName: \"kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555223 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555260 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555291 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555312 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555343 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555427 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.555971 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.559035 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.559355 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.561839 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.566275 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.576022 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69hc5\" (UniqueName: \"kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5\") pod \"nova-metadata-0\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.588633 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpwj8\" (UniqueName: \"kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8\") pod \"nova-api-0\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " pod="openstack/nova-api-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.650505 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.653549 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.660213 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cc6b65957-62z7q"]
Sep 30 21:03:45 crc kubenswrapper[4756]: I0930 21:03:45.810900 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 21:03:46 crc kubenswrapper[4756]: I0930 21:03:46.111266 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:03:46 crc kubenswrapper[4756]: I0930 21:03:46.271439 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerStarted","Data":"21b1ade6ad84c2e7d9117f9eef1ddd1074abec94a51be7db617de1efe01bdb08"}
Sep 30 21:03:46 crc kubenswrapper[4756]: I0930 21:03:46.274713 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:03:46 crc kubenswrapper[4756]: W0930 21:03:46.278607 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d0db247_1024_4c82_9762_5787390965a4.slice/crio-333f9cab5557d960cdd5c6902abd62408a47fa09426982f6629d009be39c1763 WatchSource:0}: Error finding container 333f9cab5557d960cdd5c6902abd62408a47fa09426982f6629d009be39c1763: Status 404 returned error can't find the container with id 333f9cab5557d960cdd5c6902abd62408a47fa09426982f6629d009be39c1763
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.127389 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb0c214-c7ca-4179-a52e-af13f1ce8c74" path="/var/lib/kubelet/pods/1eb0c214-c7ca-4179-a52e-af13f1ce8c74/volumes"
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.128646 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae546baf-e30a-48a8-a4c1-9bc37adfbc12" path="/var/lib/kubelet/pods/ae546baf-e30a-48a8-a4c1-9bc37adfbc12/volumes"
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.129369 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c60a0dcf-789e-4d15-9456-6e3e414e18b1" path="/var/lib/kubelet/pods/c60a0dcf-789e-4d15-9456-6e3e414e18b1/volumes"
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.294131 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerStarted","Data":"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b"}
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.294191 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerStarted","Data":"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722"}
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.294212 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerStarted","Data":"333f9cab5557d960cdd5c6902abd62408a47fa09426982f6629d009be39c1763"}
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.300072 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nx4mc" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="registry-server" containerID="cri-o://554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a" gracePeriod=2
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.300217 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerStarted","Data":"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073"}
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.300261 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerStarted","Data":"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db"}
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.343720 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.3436964 podStartE2EDuration="2.3436964s" podCreationTimestamp="2025-09-30 21:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:47.334978402 +0000 UTC m=+5556.955911909" watchObservedRunningTime="2025-09-30 21:03:47.3436964 +0000 UTC m=+5556.964629897"
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.344106 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.3440957 podStartE2EDuration="2.3440957s" podCreationTimestamp="2025-09-30 21:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:47.320025163 +0000 UTC m=+5556.940958650" watchObservedRunningTime="2025-09-30 21:03:47.3440957 +0000 UTC m=+5556.965029197"
Sep 30 21:03:47 crc kubenswrapper[4756]: I0930 21:03:47.912665 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.010127 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities\") pod \"14bad369-c240-4a03-803d-b0b6c2baf689\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.010216 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64596\" (UniqueName: \"kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596\") pod \"14bad369-c240-4a03-803d-b0b6c2baf689\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.010364 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content\") pod \"14bad369-c240-4a03-803d-b0b6c2baf689\" (UID: \"14bad369-c240-4a03-803d-b0b6c2baf689\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.011327 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities" (OuterVolumeSpecName: "utilities") pod "14bad369-c240-4a03-803d-b0b6c2baf689" (UID: "14bad369-c240-4a03-803d-b0b6c2baf689"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.028540 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596" (OuterVolumeSpecName: "kube-api-access-64596") pod "14bad369-c240-4a03-803d-b0b6c2baf689" (UID: "14bad369-c240-4a03-803d-b0b6c2baf689"). InnerVolumeSpecName "kube-api-access-64596". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.110994 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14bad369-c240-4a03-803d-b0b6c2baf689" (UID: "14bad369-c240-4a03-803d-b0b6c2baf689"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.112278 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.112303 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14bad369-c240-4a03-803d-b0b6c2baf689-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.112317 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64596\" (UniqueName: \"kubernetes.io/projected/14bad369-c240-4a03-803d-b0b6c2baf689-kube-api-access-64596\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.197524 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.309084 4756 generic.go:334] "Generic (PLEG): container finished" podID="5302e743-f6ec-4907-8de8-677e0151248b" containerID="4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77" exitCode=0
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.309162 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5302e743-f6ec-4907-8de8-677e0151248b","Type":"ContainerDied","Data":"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"}
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.309176 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.309206 4756 scope.go:117] "RemoveContainer" containerID="4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.309193 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5302e743-f6ec-4907-8de8-677e0151248b","Type":"ContainerDied","Data":"6c224288cb2230b2586ef646b92bf7c7842985d5c94277617cc4731cc90d59e3"}
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.312721 4756 generic.go:334] "Generic (PLEG): container finished" podID="14bad369-c240-4a03-803d-b0b6c2baf689" containerID="554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a" exitCode=0
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.313129 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerDied","Data":"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"}
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.313167 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx4mc" event={"ID":"14bad369-c240-4a03-803d-b0b6c2baf689","Type":"ContainerDied","Data":"240ed9f39d92476a49befb6eb53f2e77b8653c941ce2a59b7f02dc0128d6c6fd"}
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.313179 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx4mc"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.314258 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb2ss\" (UniqueName: \"kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss\") pod \"5302e743-f6ec-4907-8de8-677e0151248b\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.314499 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data\") pod \"5302e743-f6ec-4907-8de8-677e0151248b\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.314580 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle\") pod \"5302e743-f6ec-4907-8de8-677e0151248b\" (UID: \"5302e743-f6ec-4907-8de8-677e0151248b\") "
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.320410 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss" (OuterVolumeSpecName: "kube-api-access-nb2ss") pod "5302e743-f6ec-4907-8de8-677e0151248b" (UID: "5302e743-f6ec-4907-8de8-677e0151248b"). InnerVolumeSpecName "kube-api-access-nb2ss". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.344002 4756 scope.go:117] "RemoveContainer" containerID="4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.345029 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77\": container with ID starting with 4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77 not found: ID does not exist" containerID="4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.345094 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77"} err="failed to get container status \"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77\": rpc error: code = NotFound desc = could not find container \"4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77\": container with ID starting with 4c679c9660e43fea8f1f3114659b5594e7ad1b47dd0b537f8d8817564a9ede77 not found: ID does not exist"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.345133 4756 scope.go:117] "RemoveContainer" containerID="554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.349336 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5302e743-f6ec-4907-8de8-677e0151248b" (UID: "5302e743-f6ec-4907-8de8-677e0151248b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.377632 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data" (OuterVolumeSpecName: "config-data") pod "5302e743-f6ec-4907-8de8-677e0151248b" (UID: "5302e743-f6ec-4907-8de8-677e0151248b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.394240 4756 scope.go:117] "RemoveContainer" containerID="a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.417722 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.418830 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.418851 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5302e743-f6ec-4907-8de8-677e0151248b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.418860 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb2ss\" (UniqueName: \"kubernetes.io/projected/5302e743-f6ec-4907-8de8-677e0151248b-kube-api-access-nb2ss\") on node \"crc\" DevicePath \"\""
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.431447 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nx4mc"]
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.441592 4756 scope.go:117] "RemoveContainer" containerID="59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.468099 4756 scope.go:117] "RemoveContainer" containerID="554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.470327 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a\": container with ID starting with 554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a not found: ID does not exist" containerID="554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.470371 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a"} err="failed to get container status \"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a\": rpc error: code = NotFound desc = could not find container \"554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a\": container with ID starting with 554b7b42382a25feb625e63ea4eca1520fe2f2d433d6753d6d8d78fe357e7a1a not found: ID does not exist"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.470420 4756 scope.go:117] "RemoveContainer" containerID="a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.488460 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308\": container with ID starting with a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308 not found: ID does not exist" containerID="a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.488519 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308"} err="failed to get container status \"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308\": rpc error: code = NotFound desc = could not find container \"a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308\": container with ID starting with a66c3f6decf58a9510d0a9a88b49e5c4f657786a6c70f024bbc8d1e8f05ec308 not found: ID does not exist"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.488544 4756 scope.go:117] "RemoveContainer" containerID="59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.490050 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3\": container with ID starting with 59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3 not found: ID does not exist" containerID="59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.490074 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3"} err="failed to get container status \"59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3\": rpc error: code = NotFound desc = could not find container \"59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3\": container with ID starting with 59ff8e3e7c82c73b1b11ecde388b5e0208f350c2d2f6eca416a862c2ff69a9b3 not found: ID does not exist"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.642896 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.653315 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.662698 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.663147 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="registry-server"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663173 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="registry-server"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.663197 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="extract-utilities"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663206 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="extract-utilities"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.663215 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5302e743-f6ec-4907-8de8-677e0151248b" containerName="nova-scheduler-scheduler"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663224 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="5302e743-f6ec-4907-8de8-677e0151248b" containerName="nova-scheduler-scheduler"
Sep 30 21:03:48 crc kubenswrapper[4756]: E0930 21:03:48.663239 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="extract-content"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663246 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="extract-content"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663462 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" containerName="registry-server"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.663480 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="5302e743-f6ec-4907-8de8-677e0151248b" containerName="nova-scheduler-scheduler"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.664158 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.667576 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.675357 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.826265 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.826345 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wdb4\" (UniqueName: \"kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.826590 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.928305 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wdb4\" (UniqueName: \"kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.928542 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.928677 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.935298 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.935303 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.945665 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wdb4\" (UniqueName: \"kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4\") pod \"nova-scheduler-0\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " pod="openstack/nova-scheduler-0"
Sep 30 21:03:48 crc kubenswrapper[4756]: I0930 21:03:48.981101 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 21:03:49 crc kubenswrapper[4756]: I0930 21:03:49.128021 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14bad369-c240-4a03-803d-b0b6c2baf689" path="/var/lib/kubelet/pods/14bad369-c240-4a03-803d-b0b6c2baf689/volumes"
Sep 30 21:03:49 crc kubenswrapper[4756]: I0930 21:03:49.128956 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5302e743-f6ec-4907-8de8-677e0151248b" path="/var/lib/kubelet/pods/5302e743-f6ec-4907-8de8-677e0151248b/volumes"
Sep 30 21:03:49 crc kubenswrapper[4756]: I0930 21:03:49.500622 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:03:50 crc kubenswrapper[4756]: I0930 21:03:50.339877 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cdfbf865-c00f-4f7d-a4bf-5aefe8742174","Type":"ContainerStarted","Data":"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4"}
Sep 30 21:03:50 crc kubenswrapper[4756]: I0930 21:03:50.340189 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cdfbf865-c00f-4f7d-a4bf-5aefe8742174","Type":"ContainerStarted","Data":"98ae7169256b4bd019de53159550a2ad8f11fa9439b15c407717fa3cd3da0f59"}
Sep 30 21:03:50 crc kubenswrapper[4756]: I0930 21:03:50.357860 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.357844886 podStartE2EDuration="2.357844886s" podCreationTimestamp="2025-09-30 21:03:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:50.356783519 +0000 UTC m=+5559.977717006" watchObservedRunningTime="2025-09-30 21:03:50.357844886 +0000 UTC m=+5559.978778363"
Sep 30 21:03:50 crc kubenswrapper[4756]: I0930 21:03:50.650810 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 21:03:50 crc kubenswrapper[4756]: I0930 21:03:50.650885 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 21:03:52 crc kubenswrapper[4756]: I0930 21:03:52.599211 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.185832 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dvd7j"]
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.188390 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.191070 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.191311 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.202581 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvd7j"]
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.314389 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.314466 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.314567 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbg8f\" (UniqueName: \"kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.314592 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.416223 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbg8f\" (UniqueName: \"kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.416286 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.416430 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.416472 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.425516 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.427169 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.428166 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.454954 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbg8f\" (UniqueName: \"kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f\") pod \"nova-cell1-cell-mapping-dvd7j\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") " pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.512754 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.972006 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvd7j"]
Sep 30 21:03:53 crc kubenswrapper[4756]: I0930 21:03:53.981485 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 21:03:54 crc kubenswrapper[4756]: I0930 21:03:54.393029 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvd7j" event={"ID":"2fab8c82-6b0a-4747-b130-d61ef9d1b329","Type":"ContainerStarted","Data":"c54d32dfa8249d867f175844112222b3906bea1c51de1cc2408e0b68fcdedbe7"}
Sep 30 21:03:54 crc kubenswrapper[4756]: I0930 21:03:54.393098 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvd7j" event={"ID":"2fab8c82-6b0a-4747-b130-d61ef9d1b329","Type":"ContainerStarted","Data":"33d60796381de070475a089519742cd077b1d5190e9d2d16bd173cdbd445d9ed"}
Sep 30 21:03:54 crc kubenswrapper[4756]: I0930 21:03:54.414483 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-dvd7j" podStartSLOduration=1.414467352 podStartE2EDuration="1.414467352s" podCreationTimestamp="2025-09-30 21:03:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:03:54.409961714 +0000 UTC m=+5564.030895191" watchObservedRunningTime="2025-09-30 21:03:54.414467352 +0000 UTC m=+5564.035400829"
Sep 30 21:03:55 crc kubenswrapper[4756]: I0930 21:03:55.652023 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 21:03:55 crc kubenswrapper[4756]: I0930 21:03:55.654683 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 21:03:55 crc kubenswrapper[4756]: I0930 21:03:55.812179 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 21:03:55 crc kubenswrapper[4756]: I0930 21:03:55.812223 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 21:03:56 crc kubenswrapper[4756]: I0930 21:03:56.733532 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.71:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 21:03:56 crc kubenswrapper[4756]: I0930 21:03:56.733830 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.71:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 21:03:56 crc kubenswrapper[4756]: I0930 21:03:56.895848 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.72:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 21:03:56 crc kubenswrapper[4756]: I0930 21:03:56.896258 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.72:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 21:03:58 crc kubenswrapper[4756]: I0930 21:03:58.981949 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 21:03:59 crc kubenswrapper[4756]: I0930 21:03:59.018706 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 21:03:59 crc kubenswrapper[4756]: I0930 21:03:59.451755 4756 generic.go:334] "Generic (PLEG): container finished" podID="2fab8c82-6b0a-4747-b130-d61ef9d1b329" containerID="c54d32dfa8249d867f175844112222b3906bea1c51de1cc2408e0b68fcdedbe7" exitCode=0
Sep 30 21:03:59 crc kubenswrapper[4756]: I0930 21:03:59.451862 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvd7j" event={"ID":"2fab8c82-6b0a-4747-b130-d61ef9d1b329","Type":"ContainerDied","Data":"c54d32dfa8249d867f175844112222b3906bea1c51de1cc2408e0b68fcdedbe7"}
Sep 30 21:03:59 crc kubenswrapper[4756]: I0930 21:03:59.510343 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.876574 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.987485 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts\") pod \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") "
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.987602 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data\") pod \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") "
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.987650 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbg8f\" (UniqueName: \"kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f\") pod \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") "
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.987689 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle\") pod \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\" (UID: \"2fab8c82-6b0a-4747-b130-d61ef9d1b329\") "
Sep 30 21:04:00 crc kubenswrapper[4756]: I0930 21:04:00.994788 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f" (OuterVolumeSpecName: "kube-api-access-hbg8f") pod "2fab8c82-6b0a-4747-b130-d61ef9d1b329" (UID: "2fab8c82-6b0a-4747-b130-d61ef9d1b329"). InnerVolumeSpecName "kube-api-access-hbg8f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.001584 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts" (OuterVolumeSpecName: "scripts") pod "2fab8c82-6b0a-4747-b130-d61ef9d1b329" (UID: "2fab8c82-6b0a-4747-b130-d61ef9d1b329"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.017593 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data" (OuterVolumeSpecName: "config-data") pod "2fab8c82-6b0a-4747-b130-d61ef9d1b329" (UID: "2fab8c82-6b0a-4747-b130-d61ef9d1b329"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.023321 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2fab8c82-6b0a-4747-b130-d61ef9d1b329" (UID: "2fab8c82-6b0a-4747-b130-d61ef9d1b329"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.090883 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.090935 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbg8f\" (UniqueName: \"kubernetes.io/projected/2fab8c82-6b0a-4747-b130-d61ef9d1b329-kube-api-access-hbg8f\") on node \"crc\" DevicePath \"\""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.090956 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.090974 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fab8c82-6b0a-4747-b130-d61ef9d1b329-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.481177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvd7j" event={"ID":"2fab8c82-6b0a-4747-b130-d61ef9d1b329","Type":"ContainerDied","Data":"33d60796381de070475a089519742cd077b1d5190e9d2d16bd173cdbd445d9ed"}
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.481358 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33d60796381de070475a089519742cd077b1d5190e9d2d16bd173cdbd445d9ed"
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.481483 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvd7j"
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.699077 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.699532 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerName="nova-scheduler-scheduler" containerID="cri-o://7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" gracePeriod=30
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.708027 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.708453 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-log" containerID="cri-o://a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722" gracePeriod=30
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.708543 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-api" containerID="cri-o://c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b" gracePeriod=30
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.740704 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.741072 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-log" containerID="cri-o://e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db" gracePeriod=30
Sep 30 21:04:01 crc kubenswrapper[4756]: I0930 21:04:01.741193 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-metadata" containerID="cri-o://f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073" gracePeriod=30
Sep 30 21:04:02 crc kubenswrapper[4756]: I0930 21:04:02.491063 4756 generic.go:334] "Generic (PLEG): container finished" podID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerID="e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db" exitCode=143
Sep 30 21:04:02 crc kubenswrapper[4756]: I0930 21:04:02.491147 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerDied","Data":"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db"}
Sep 30 21:04:02 crc kubenswrapper[4756]: I0930 21:04:02.494308 4756 generic.go:334] "Generic (PLEG): container finished" podID="8d0db247-1024-4c82-9762-5787390965a4" containerID="a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722" exitCode=143
Sep 30 21:04:02 crc kubenswrapper[4756]: I0930 21:04:02.494351 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerDied","Data":"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722"}
Sep 30 21:04:03 crc kubenswrapper[4756]: E0930 21:04:03.984704 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 21:04:03 crc kubenswrapper[4756]: E0930 21:04:03.986743 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 21:04:03 crc kubenswrapper[4756]: E0930 21:04:03.988953 4756 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 21:04:03 crc kubenswrapper[4756]: E0930 21:04:03.989034 4756 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerName="nova-scheduler-scheduler" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.373919 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.382284 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.477963 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpwj8\" (UniqueName: \"kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8\") pod \"8d0db247-1024-4c82-9762-5787390965a4\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478011 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data\") pod \"2411423d-44e2-4f54-b22b-26a38bdfaec3\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478078 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle\") pod \"2411423d-44e2-4f54-b22b-26a38bdfaec3\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478102 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs\") pod \"8d0db247-1024-4c82-9762-5787390965a4\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478213 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs\") pod \"2411423d-44e2-4f54-b22b-26a38bdfaec3\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478260 4756 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data\") pod \"8d0db247-1024-4c82-9762-5787390965a4\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478286 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69hc5\" (UniqueName: \"kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5\") pod \"2411423d-44e2-4f54-b22b-26a38bdfaec3\" (UID: \"2411423d-44e2-4f54-b22b-26a38bdfaec3\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478363 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle\") pod \"8d0db247-1024-4c82-9762-5787390965a4\" (UID: \"8d0db247-1024-4c82-9762-5787390965a4\") " Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478616 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs" (OuterVolumeSpecName: "logs") pod "2411423d-44e2-4f54-b22b-26a38bdfaec3" (UID: "2411423d-44e2-4f54-b22b-26a38bdfaec3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478650 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs" (OuterVolumeSpecName: "logs") pod "8d0db247-1024-4c82-9762-5787390965a4" (UID: "8d0db247-1024-4c82-9762-5787390965a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478838 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2411423d-44e2-4f54-b22b-26a38bdfaec3-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.478861 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d0db247-1024-4c82-9762-5787390965a4-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.483133 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8" (OuterVolumeSpecName: "kube-api-access-fpwj8") pod "8d0db247-1024-4c82-9762-5787390965a4" (UID: "8d0db247-1024-4c82-9762-5787390965a4"). InnerVolumeSpecName "kube-api-access-fpwj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.483298 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5" (OuterVolumeSpecName: "kube-api-access-69hc5") pod "2411423d-44e2-4f54-b22b-26a38bdfaec3" (UID: "2411423d-44e2-4f54-b22b-26a38bdfaec3"). InnerVolumeSpecName "kube-api-access-69hc5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.501422 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data" (OuterVolumeSpecName: "config-data") pod "8d0db247-1024-4c82-9762-5787390965a4" (UID: "8d0db247-1024-4c82-9762-5787390965a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.502331 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data" (OuterVolumeSpecName: "config-data") pod "2411423d-44e2-4f54-b22b-26a38bdfaec3" (UID: "2411423d-44e2-4f54-b22b-26a38bdfaec3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.510969 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d0db247-1024-4c82-9762-5787390965a4" (UID: "8d0db247-1024-4c82-9762-5787390965a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.512334 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2411423d-44e2-4f54-b22b-26a38bdfaec3" (UID: "2411423d-44e2-4f54-b22b-26a38bdfaec3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.524459 4756 generic.go:334] "Generic (PLEG): container finished" podID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerID="f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073" exitCode=0 Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.524519 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.524534 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerDied","Data":"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073"} Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.524589 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2411423d-44e2-4f54-b22b-26a38bdfaec3","Type":"ContainerDied","Data":"21b1ade6ad84c2e7d9117f9eef1ddd1074abec94a51be7db617de1efe01bdb08"} Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.524610 4756 scope.go:117] "RemoveContainer" containerID="f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.527271 4756 generic.go:334] "Generic (PLEG): container finished" podID="8d0db247-1024-4c82-9762-5787390965a4" containerID="c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b" exitCode=0 Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.527309 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerDied","Data":"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b"} Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.527339 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d0db247-1024-4c82-9762-5787390965a4","Type":"ContainerDied","Data":"333f9cab5557d960cdd5c6902abd62408a47fa09426982f6629d009be39c1763"} Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.527436 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.555343 4756 scope.go:117] "RemoveContainer" containerID="e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.569133 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580127 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580164 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580177 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69hc5\" (UniqueName: \"kubernetes.io/projected/2411423d-44e2-4f54-b22b-26a38bdfaec3-kube-api-access-69hc5\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580187 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0db247-1024-4c82-9762-5787390965a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580197 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpwj8\" (UniqueName: \"kubernetes.io/projected/8d0db247-1024-4c82-9762-5787390965a4-kube-api-access-fpwj8\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.580207 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2411423d-44e2-4f54-b22b-26a38bdfaec3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.583241 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.601631 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.608934 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.609337 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-log" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609364 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-log" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.609387 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fab8c82-6b0a-4747-b130-d61ef9d1b329" containerName="nova-manage" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609408 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fab8c82-6b0a-4747-b130-d61ef9d1b329" containerName="nova-manage" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.609436 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-api" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609457 4756 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-api" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.609467 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-metadata" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609473 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-metadata" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.609486 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-log" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609493 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-log" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609705 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-log" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609719 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-metadata" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609733 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fab8c82-6b0a-4747-b130-d61ef9d1b329" containerName="nova-manage" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609749 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0db247-1024-4c82-9762-5787390965a4" containerName="nova-api-api" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.609765 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" containerName="nova-metadata-log" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.610812 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.613887 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.625679 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.639205 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.639267 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.646302 4756 scope.go:117] "RemoveContainer" containerID="f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.647134 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.647747 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073\": container with ID starting with f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073 not found: ID does not exist" containerID="f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.647782 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073"} err="failed to get container status \"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073\": rpc error: code = NotFound desc = could not find container \"f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073\": container with ID starting with f1a2d68f33be81bc2113f035f8283691835ad13ed262e05ac486ed8b18e54073 not found: ID does not exist" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.647806 4756 scope.go:117] "RemoveContainer" containerID="e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.648128 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db\": container with ID starting with e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db not found: ID does not exist" containerID="e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.648172 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db"} err="failed to get container status \"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db\": rpc error: code = NotFound desc = could not find container \"e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db\": container with ID starting with e4d6beac6b701f14d688bb9a1f727b32cf794076e97fd3cd59ab3e282e4fb5db not found: ID does not exist" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.648201 4756 scope.go:117] "RemoveContainer" containerID="c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.652830 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.656690 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.679134 4756 scope.go:117] "RemoveContainer" containerID="a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.681028 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.681072 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-5pxlb\" (UniqueName: \"kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.681172 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.681349 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.696810 4756 scope.go:117] "RemoveContainer" containerID="c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.697196 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b\": container with ID starting with c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b not found: ID does not exist" containerID="c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.697239 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b"} err="failed to get container status \"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b\": rpc error: code = NotFound desc = could not find container \"c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b\": container with ID starting with c1bc3ebe4e28f838195bf1f9fb22ad724fbe554250753747ef3da66be17f9d4b not found: ID does not exist" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.697273 4756 scope.go:117] "RemoveContainer" containerID="a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722" Sep 30 21:04:05 crc kubenswrapper[4756]: E0930 21:04:05.697653 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722\": container with ID starting with a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722 not found: ID does not exist" containerID="a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.697698 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722"} err="failed to get container status \"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722\": rpc error: code = NotFound desc = could not find container \"a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722\": container with ID starting with a91833f88080418b783bedd6c5998cbc922085b529f3e3fb7df280f71bb73722 not found: ID does not exist" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783347 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783425 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783463 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783485 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pxlb\" (UniqueName: \"kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783513 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.783532 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxzbp\" (UniqueName: \"kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.784431 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.784663 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.784939 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.787189 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 
21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.788716 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.805008 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pxlb\" (UniqueName: \"kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb\") pod \"nova-metadata-0\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.887567 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxzbp\" (UniqueName: \"kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.887921 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.888684 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.889011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.889826 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.892958 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.893727 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.903361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxzbp\" (UniqueName: \"kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp\") pod \"nova-api-0\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " pod="openstack/nova-api-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.978040 4756 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:04:05 crc kubenswrapper[4756]: I0930 21:04:05.981292 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.295643 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.404960 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data\") pod \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.405143 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle\") pod \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.405216 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wdb4\" (UniqueName: \"kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4\") pod \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\" (UID: \"cdfbf865-c00f-4f7d-a4bf-5aefe8742174\") " Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.410195 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4" (OuterVolumeSpecName: "kube-api-access-8wdb4") pod "cdfbf865-c00f-4f7d-a4bf-5aefe8742174" (UID: "cdfbf865-c00f-4f7d-a4bf-5aefe8742174"). InnerVolumeSpecName "kube-api-access-8wdb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.434862 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdfbf865-c00f-4f7d-a4bf-5aefe8742174" (UID: "cdfbf865-c00f-4f7d-a4bf-5aefe8742174"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.436370 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data" (OuterVolumeSpecName: "config-data") pod "cdfbf865-c00f-4f7d-a4bf-5aefe8742174" (UID: "cdfbf865-c00f-4f7d-a4bf-5aefe8742174"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:04:06 crc kubenswrapper[4756]: W0930 21:04:06.465309 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc3620f4_f8ca_4742_a1a4_18810a786afc.slice/crio-3a32dc5001a5b071b5e4a58e38cabb6594be9a24cf39620c7fb76e189e5138b8 WatchSource:0}: Error finding container 3a32dc5001a5b071b5e4a58e38cabb6594be9a24cf39620c7fb76e189e5138b8: Status 404 returned error can't find the container with id 3a32dc5001a5b071b5e4a58e38cabb6594be9a24cf39620c7fb76e189e5138b8 Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.469788 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.507787 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.507823 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wdb4\" (UniqueName: \"kubernetes.io/projected/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-kube-api-access-8wdb4\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.507877 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdfbf865-c00f-4f7d-a4bf-5aefe8742174-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.538997 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.544409 4756 generic.go:334] "Generic (PLEG): container finished" podID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" exitCode=0 Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.544481 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cdfbf865-c00f-4f7d-a4bf-5aefe8742174","Type":"ContainerDied","Data":"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4"} Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.544508 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cdfbf865-c00f-4f7d-a4bf-5aefe8742174","Type":"ContainerDied","Data":"98ae7169256b4bd019de53159550a2ad8f11fa9439b15c407717fa3cd3da0f59"} Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.544543 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.544553 4756 scope.go:117] "RemoveContainer" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.545949 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerStarted","Data":"3a32dc5001a5b071b5e4a58e38cabb6594be9a24cf39620c7fb76e189e5138b8"} Sep 30 21:04:06 crc kubenswrapper[4756]: W0930 21:04:06.551103 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ef7ebf2_0e17_4d34_b8c2_3c99c084a632.slice/crio-f2c3ea7a817fe53c7fb393ae348d6b0512140caca2d1e5b3caf8c40cc8029678 WatchSource:0}: Error finding container f2c3ea7a817fe53c7fb393ae348d6b0512140caca2d1e5b3caf8c40cc8029678: Status 404 returned error can't find the container with id f2c3ea7a817fe53c7fb393ae348d6b0512140caca2d1e5b3caf8c40cc8029678 Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.572792 4756 scope.go:117] "RemoveContainer" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" Sep 30 21:04:06 crc kubenswrapper[4756]: E0930 21:04:06.573334 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4\": container with ID starting with 7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4 not found: ID does not exist" containerID="7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.573375 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4"} err="failed to get container status \"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4\": rpc error: code = NotFound desc = could not find container \"7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4\": container with ID starting with 7884a260933fd3c2e5eb4a2dfe059ed0d8a8b7194eda8e9b3cb13d41709ddec4 not found: ID does not exist" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.591259 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.599425 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.611089 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: E0930 21:04:06.611668 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerName="nova-scheduler-scheduler" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.611688 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerName="nova-scheduler-scheduler" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.611929 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" containerName="nova-scheduler-scheduler" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.612784 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.620207 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.627834 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.712005 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.712225 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swhkc\" (UniqueName: \"kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.712298 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.813617 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swhkc\" (UniqueName: \"kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.813683 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.813724 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.816764 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.817860 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.831349 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swhkc\" (UniqueName: 
\"kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc\") pod \"nova-scheduler-0\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " pod="openstack/nova-scheduler-0" Sep 30 21:04:06 crc kubenswrapper[4756]: I0930 21:04:06.933490 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.129192 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2411423d-44e2-4f54-b22b-26a38bdfaec3" path="/var/lib/kubelet/pods/2411423d-44e2-4f54-b22b-26a38bdfaec3/volumes" Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.130143 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d0db247-1024-4c82-9762-5787390965a4" path="/var/lib/kubelet/pods/8d0db247-1024-4c82-9762-5787390965a4/volumes" Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.130726 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdfbf865-c00f-4f7d-a4bf-5aefe8742174" path="/var/lib/kubelet/pods/cdfbf865-c00f-4f7d-a4bf-5aefe8742174/volumes" Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.383742 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:04:07 crc kubenswrapper[4756]: W0930 21:04:07.396230 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b1a012e_bb74_4006_b959_4a8a1ded3f5e.slice/crio-856a884015c33a2c2f1204414bc8d9b40af57895ea1e2b6c1c0976501e2f26aa WatchSource:0}: Error finding container 856a884015c33a2c2f1204414bc8d9b40af57895ea1e2b6c1c0976501e2f26aa: Status 404 returned error can't find the container with id 856a884015c33a2c2f1204414bc8d9b40af57895ea1e2b6c1c0976501e2f26aa Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.568890 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerStarted","Data":"424264b339bbf775ce759d28632ff896927414a684f203d7d27da866391cd17e"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.569500 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerStarted","Data":"70a011209c6f2a0a913c6d62d3c9a352aeb1ee89b2acd5bfe315d03d908d8caa"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.569548 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerStarted","Data":"f2c3ea7a817fe53c7fb393ae348d6b0512140caca2d1e5b3caf8c40cc8029678"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.598601 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b1a012e-bb74-4006-b959-4a8a1ded3f5e","Type":"ContainerStarted","Data":"856a884015c33a2c2f1204414bc8d9b40af57895ea1e2b6c1c0976501e2f26aa"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.608340 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerStarted","Data":"b692b5eac6b2bb0100cd23ae3cb43fbc5955a114923298854458e2f195a2e630"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.608378 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerStarted","Data":"061b08b6ec14f7f2905aaadce1b0078af830a244b4544561bf4f2b4ed101c4f6"} Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.619911 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.619894768 podStartE2EDuration="2.619894768s" podCreationTimestamp="2025-09-30 21:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:07.598331445 +0000 UTC m=+5577.219264922" watchObservedRunningTime="2025-09-30 21:04:07.619894768 +0000 UTC m=+5577.240828235" Sep 30 21:04:07 crc kubenswrapper[4756]: I0930 21:04:07.636042 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.636023948 podStartE2EDuration="2.636023948s" podCreationTimestamp="2025-09-30 21:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:07.627993729 +0000 UTC m=+5577.248927206" watchObservedRunningTime="2025-09-30 21:04:07.636023948 +0000 UTC m=+5577.256957425" Sep 30 21:04:08 crc kubenswrapper[4756]: I0930 21:04:08.621052 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b1a012e-bb74-4006-b959-4a8a1ded3f5e","Type":"ContainerStarted","Data":"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1"} Sep 30 21:04:08 crc kubenswrapper[4756]: I0930 21:04:08.647197 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.647175762 podStartE2EDuration="2.647175762s" podCreationTimestamp="2025-09-30 21:04:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:08.638230968 +0000 UTC m=+5578.259164455" watchObservedRunningTime="2025-09-30 21:04:08.647175762 +0000 UTC m=+5578.268109249" Sep 30 21:04:10 crc kubenswrapper[4756]: I0930 21:04:10.979583 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:04:10 crc kubenswrapper[4756]: I0930 21:04:10.979877 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:04:11 crc kubenswrapper[4756]: I0930 21:04:11.933634 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 21:04:15 crc kubenswrapper[4756]: I0930 21:04:15.979078 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 21:04:15 crc kubenswrapper[4756]: I0930 21:04:15.980666 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 21:04:15 crc kubenswrapper[4756]: I0930 21:04:15.982644 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 21:04:15 crc kubenswrapper[4756]: I0930 21:04:15.982736 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 21:04:16 crc kubenswrapper[4756]: I0930 21:04:16.934216 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.023069 4756 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.145658 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.145985 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.145724 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.146048 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:04:17 crc kubenswrapper[4756]: I0930 21:04:17.777567 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.982488 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.984632 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.986755 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.987444 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.987506 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.988341 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.991894 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 21:04:25 crc kubenswrapper[4756]: I0930 21:04:25.992002 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 21:04:26 crc kubenswrapper[4756]: I0930 21:04:26.810262 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 21:04:26 crc kubenswrapper[4756]: I0930 21:04:26.816198 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.042301 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:04:27 crc 
kubenswrapper[4756]: I0930 21:04:27.044073 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.072588 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.100340 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.100404 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.100440 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmjfn\" (UniqueName: \"kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.100482 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.100498 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.202074 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.202173 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmjfn\" (UniqueName: \"kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.202248 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 
21:04:27.202270 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.203256 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.203521 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.204010 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.204260 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.204320 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.226969 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmjfn\" (UniqueName: \"kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn\") pod \"dnsmasq-dns-b97b5ffb9-8ggjl\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.370757 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:27 crc kubenswrapper[4756]: I0930 21:04:27.883237 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:04:27 crc kubenswrapper[4756]: W0930 21:04:27.884094 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f732efe_df8f_4a0e_8586_922e1482274d.slice/crio-4abc5d4d1cfa7482ef3414b7c3e2f486d65c7e2bff747f111bfdab41e2fb9539 WatchSource:0}: Error finding container 4abc5d4d1cfa7482ef3414b7c3e2f486d65c7e2bff747f111bfdab41e2fb9539: Status 404 returned error can't find the container with id 4abc5d4d1cfa7482ef3414b7c3e2f486d65c7e2bff747f111bfdab41e2fb9539 Sep 30 21:04:28 crc kubenswrapper[4756]: I0930 21:04:28.833985 4756 generic.go:334] "Generic (PLEG): container finished" podID="8f732efe-df8f-4a0e-8586-922e1482274d" containerID="94214aec96db9b781aa3342ee63ce347ba826682a9f96d6627e9982bdab3ac15" exitCode=0 Sep 30 21:04:28 crc kubenswrapper[4756]: I0930 21:04:28.834023 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" event={"ID":"8f732efe-df8f-4a0e-8586-922e1482274d","Type":"ContainerDied","Data":"94214aec96db9b781aa3342ee63ce347ba826682a9f96d6627e9982bdab3ac15"} Sep 30 21:04:28 crc kubenswrapper[4756]: I0930 21:04:28.834424 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" event={"ID":"8f732efe-df8f-4a0e-8586-922e1482274d","Type":"ContainerStarted","Data":"4abc5d4d1cfa7482ef3414b7c3e2f486d65c7e2bff747f111bfdab41e2fb9539"} Sep 30 21:04:29 crc kubenswrapper[4756]: I0930 21:04:29.847964 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" event={"ID":"8f732efe-df8f-4a0e-8586-922e1482274d","Type":"ContainerStarted","Data":"031e829e3c60bd9f44fbba3d3cb8d844ac9f8e9629aace72e128eed11e1bf516"} Sep 30 21:04:29 crc kubenswrapper[4756]: I0930 21:04:29.849622 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:29 crc kubenswrapper[4756]: I0930 21:04:29.880166 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" podStartSLOduration=2.880148783 podStartE2EDuration="2.880148783s" podCreationTimestamp="2025-09-30 21:04:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:29.86889369 +0000 UTC m=+5599.489827207" watchObservedRunningTime="2025-09-30 21:04:29.880148783 +0000 UTC m=+5599.501082260" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.373746 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.457483 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"] Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.457756 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="dnsmasq-dns" containerID="cri-o://2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3" gracePeriod=10 Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.949078 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.949594 4756 generic.go:334] "Generic (PLEG): container finished" podID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerID="2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3" exitCode=0 Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.949631 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" event={"ID":"e093a2b1-f219-40f0-9abc-bb97939f235a","Type":"ContainerDied","Data":"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3"} Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.949657 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" event={"ID":"e093a2b1-f219-40f0-9abc-bb97939f235a","Type":"ContainerDied","Data":"d6abefba6282900c444f788efdcfbc1fd514dd34dbb973884f5f8ab8a328fb20"} Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.949676 4756 scope.go:117] "RemoveContainer" containerID="2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.971651 4756 scope.go:117] "RemoveContainer" containerID="72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.992450 4756 scope.go:117] "RemoveContainer" containerID="2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3" Sep 30 21:04:37 crc kubenswrapper[4756]: E0930 21:04:37.993772 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3\": container with ID starting with 2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3 not found: ID does not exist" containerID="2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.993830 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3"} err="failed to get container status \"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3\": rpc error: code = NotFound desc = could not find container \"2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3\": container with ID starting with 2d4d409e7c8bca878f1a8f203d1f7cae4e23a711dff3dba78a4d1f6a26ff60f3 not found: ID does not exist" Sep 30 21:04:37 crc kubenswrapper[4756]: I0930 21:04:37.993866 4756 scope.go:117] "RemoveContainer" containerID="72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d" Sep 30 21:04:37 crc kubenswrapper[4756]: E0930 21:04:37.995281 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d\": container with ID starting with 72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d not found: ID does not exist" containerID="72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:37.995364 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d"} err="failed to get container status \"72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d\": rpc error: code = NotFound 
desc = could not find container \"72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d\": container with ID starting with 72fb08264183c81aecc89659064a7f2a89ea376a916d3bdf4187631f0e92318d not found: ID does not exist" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.027306 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb\") pod \"e093a2b1-f219-40f0-9abc-bb97939f235a\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.027431 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4872q\" (UniqueName: \"kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q\") pod \"e093a2b1-f219-40f0-9abc-bb97939f235a\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.027484 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc\") pod \"e093a2b1-f219-40f0-9abc-bb97939f235a\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.027573 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config\") pod \"e093a2b1-f219-40f0-9abc-bb97939f235a\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.027631 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb\") pod \"e093a2b1-f219-40f0-9abc-bb97939f235a\" (UID: \"e093a2b1-f219-40f0-9abc-bb97939f235a\") " Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.034305 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q" (OuterVolumeSpecName: "kube-api-access-4872q") pod "e093a2b1-f219-40f0-9abc-bb97939f235a" (UID: "e093a2b1-f219-40f0-9abc-bb97939f235a"). InnerVolumeSpecName "kube-api-access-4872q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.077606 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e093a2b1-f219-40f0-9abc-bb97939f235a" (UID: "e093a2b1-f219-40f0-9abc-bb97939f235a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.085148 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config" (OuterVolumeSpecName: "config") pod "e093a2b1-f219-40f0-9abc-bb97939f235a" (UID: "e093a2b1-f219-40f0-9abc-bb97939f235a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.097280 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e093a2b1-f219-40f0-9abc-bb97939f235a" (UID: "e093a2b1-f219-40f0-9abc-bb97939f235a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.097420 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e093a2b1-f219-40f0-9abc-bb97939f235a" (UID: "e093a2b1-f219-40f0-9abc-bb97939f235a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.129973 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4872q\" (UniqueName: \"kubernetes.io/projected/e093a2b1-f219-40f0-9abc-bb97939f235a-kube-api-access-4872q\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.130011 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.130025 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.130039 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.130051 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e093a2b1-f219-40f0-9abc-bb97939f235a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:38 crc kubenswrapper[4756]: I0930 21:04:38.962283 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b45b7695-v65nt" Sep 30 21:04:39 crc kubenswrapper[4756]: I0930 21:04:39.028991 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"] Sep 30 21:04:39 crc kubenswrapper[4756]: I0930 21:04:39.039103 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58b45b7695-v65nt"] Sep 30 21:04:39 crc kubenswrapper[4756]: I0930 21:04:39.138452 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" path="/var/lib/kubelet/pods/e093a2b1-f219-40f0-9abc-bb97939f235a/volumes" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.855765 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-8gl65"] Sep 30 21:04:40 crc kubenswrapper[4756]: E0930 21:04:40.856385 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="dnsmasq-dns" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.856418 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="dnsmasq-dns" Sep 30 21:04:40 crc kubenswrapper[4756]: E0930 21:04:40.856467 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="init" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.856476 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="init" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.856669 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e093a2b1-f219-40f0-9abc-bb97939f235a" containerName="dnsmasq-dns" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.857510 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.865824 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8gl65"] Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.884304 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r54ck\" (UniqueName: \"kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck\") pod \"cinder-db-create-8gl65\" (UID: \"c17ad536-947a-46ba-8e65-7278fd29341d\") " pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:40 crc kubenswrapper[4756]: I0930 21:04:40.985539 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r54ck\" (UniqueName: \"kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck\") pod \"cinder-db-create-8gl65\" (UID: \"c17ad536-947a-46ba-8e65-7278fd29341d\") " pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:41 crc kubenswrapper[4756]: I0930 21:04:41.006912 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r54ck\" (UniqueName: \"kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck\") pod \"cinder-db-create-8gl65\" (UID: \"c17ad536-947a-46ba-8e65-7278fd29341d\") " pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:41 crc kubenswrapper[4756]: I0930 21:04:41.184446 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:41 crc kubenswrapper[4756]: I0930 21:04:41.713374 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8gl65"] Sep 30 21:04:41 crc kubenswrapper[4756]: I0930 21:04:41.992694 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8gl65" event={"ID":"c17ad536-947a-46ba-8e65-7278fd29341d","Type":"ContainerStarted","Data":"ef2b0afd717aa33f6df89fea39ee762a49ac54116ec6479f76c5b4c9d2282417"} Sep 30 21:04:41 crc kubenswrapper[4756]: I0930 21:04:41.993046 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8gl65" event={"ID":"c17ad536-947a-46ba-8e65-7278fd29341d","Type":"ContainerStarted","Data":"8f5376ce99602616116993aacede0f36c8957e103db2fe6c1c7053fb8fbcc345"} Sep 30 21:04:42 crc kubenswrapper[4756]: I0930 21:04:42.013053 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-8gl65" podStartSLOduration=2.013026215 podStartE2EDuration="2.013026215s" podCreationTimestamp="2025-09-30 21:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:42.011575947 +0000 UTC m=+5611.632509434" watchObservedRunningTime="2025-09-30 21:04:42.013026215 +0000 UTC m=+5611.633959722" Sep 30 21:04:43 crc kubenswrapper[4756]: I0930 21:04:43.002226 4756 generic.go:334] "Generic (PLEG): container finished" podID="c17ad536-947a-46ba-8e65-7278fd29341d" containerID="ef2b0afd717aa33f6df89fea39ee762a49ac54116ec6479f76c5b4c9d2282417" exitCode=0 Sep 30 21:04:43 crc kubenswrapper[4756]: I0930 21:04:43.002316 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8gl65" event={"ID":"c17ad536-947a-46ba-8e65-7278fd29341d","Type":"ContainerDied","Data":"ef2b0afd717aa33f6df89fea39ee762a49ac54116ec6479f76c5b4c9d2282417"} Sep 30 21:04:44 crc kubenswrapper[4756]: I0930 21:04:44.320563 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:44 crc kubenswrapper[4756]: I0930 21:04:44.456909 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r54ck\" (UniqueName: \"kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck\") pod \"c17ad536-947a-46ba-8e65-7278fd29341d\" (UID: \"c17ad536-947a-46ba-8e65-7278fd29341d\") " Sep 30 21:04:44 crc kubenswrapper[4756]: I0930 21:04:44.464574 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck" (OuterVolumeSpecName: "kube-api-access-r54ck") pod "c17ad536-947a-46ba-8e65-7278fd29341d" (UID: "c17ad536-947a-46ba-8e65-7278fd29341d"). InnerVolumeSpecName "kube-api-access-r54ck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:44 crc kubenswrapper[4756]: I0930 21:04:44.559682 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r54ck\" (UniqueName: \"kubernetes.io/projected/c17ad536-947a-46ba-8e65-7278fd29341d-kube-api-access-r54ck\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:45 crc kubenswrapper[4756]: I0930 21:04:45.025803 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8gl65" event={"ID":"c17ad536-947a-46ba-8e65-7278fd29341d","Type":"ContainerDied","Data":"8f5376ce99602616116993aacede0f36c8957e103db2fe6c1c7053fb8fbcc345"} Sep 30 21:04:45 crc kubenswrapper[4756]: I0930 21:04:45.025863 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f5376ce99602616116993aacede0f36c8957e103db2fe6c1c7053fb8fbcc345" Sep 30 21:04:45 crc kubenswrapper[4756]: I0930 21:04:45.025917 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8gl65" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.927897 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-489a-account-create-jdh7m"] Sep 30 21:04:50 crc kubenswrapper[4756]: E0930 21:04:50.929048 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c17ad536-947a-46ba-8e65-7278fd29341d" containerName="mariadb-database-create" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.929066 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c17ad536-947a-46ba-8e65-7278fd29341d" containerName="mariadb-database-create" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.929242 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c17ad536-947a-46ba-8e65-7278fd29341d" containerName="mariadb-database-create" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.929929 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.932274 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 21:04:50 crc kubenswrapper[4756]: I0930 21:04:50.938328 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-489a-account-create-jdh7m"] Sep 30 21:04:51 crc kubenswrapper[4756]: I0930 21:04:51.105467 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t86js\" (UniqueName: \"kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js\") pod \"cinder-489a-account-create-jdh7m\" (UID: \"1ce7b085-4d4c-4a96-a6fb-98c40eca978e\") " pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:51 crc kubenswrapper[4756]: I0930 21:04:51.207362 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t86js\" (UniqueName: \"kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js\") pod \"cinder-489a-account-create-jdh7m\" (UID: \"1ce7b085-4d4c-4a96-a6fb-98c40eca978e\") " pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:51 crc kubenswrapper[4756]: I0930 21:04:51.241597 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t86js\" (UniqueName: \"kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js\") pod \"cinder-489a-account-create-jdh7m\" (UID: \"1ce7b085-4d4c-4a96-a6fb-98c40eca978e\") " pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:51 crc kubenswrapper[4756]: I0930 21:04:51.255694 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:51 crc kubenswrapper[4756]: I0930 21:04:51.761366 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-489a-account-create-jdh7m"] Sep 30 21:04:52 crc kubenswrapper[4756]: I0930 21:04:52.108483 4756 generic.go:334] "Generic (PLEG): container finished" podID="1ce7b085-4d4c-4a96-a6fb-98c40eca978e" containerID="9edaec9102677cb3ad0033bc639eb569160f9fa78284ce28bf43aec76f0e2086" exitCode=0 Sep 30 21:04:52 crc kubenswrapper[4756]: I0930 21:04:52.108523 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-489a-account-create-jdh7m" event={"ID":"1ce7b085-4d4c-4a96-a6fb-98c40eca978e","Type":"ContainerDied","Data":"9edaec9102677cb3ad0033bc639eb569160f9fa78284ce28bf43aec76f0e2086"} Sep 30 21:04:52 crc kubenswrapper[4756]: I0930 21:04:52.108548 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-489a-account-create-jdh7m" event={"ID":"1ce7b085-4d4c-4a96-a6fb-98c40eca978e","Type":"ContainerStarted","Data":"ffec952393d7653635b7b6a662e0500454ed0b0f5c1d72e207e484ffd24ac2b5"} Sep 30 21:04:53 crc kubenswrapper[4756]: I0930 21:04:53.488185 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:53 crc kubenswrapper[4756]: I0930 21:04:53.658167 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t86js\" (UniqueName: \"kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js\") pod \"1ce7b085-4d4c-4a96-a6fb-98c40eca978e\" (UID: \"1ce7b085-4d4c-4a96-a6fb-98c40eca978e\") " Sep 30 21:04:53 crc kubenswrapper[4756]: I0930 21:04:53.666863 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js" (OuterVolumeSpecName: "kube-api-access-t86js") pod "1ce7b085-4d4c-4a96-a6fb-98c40eca978e" (UID: "1ce7b085-4d4c-4a96-a6fb-98c40eca978e"). InnerVolumeSpecName "kube-api-access-t86js". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:53 crc kubenswrapper[4756]: I0930 21:04:53.760041 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t86js\" (UniqueName: \"kubernetes.io/projected/1ce7b085-4d4c-4a96-a6fb-98c40eca978e-kube-api-access-t86js\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:54 crc kubenswrapper[4756]: I0930 21:04:54.127344 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-489a-account-create-jdh7m" event={"ID":"1ce7b085-4d4c-4a96-a6fb-98c40eca978e","Type":"ContainerDied","Data":"ffec952393d7653635b7b6a662e0500454ed0b0f5c1d72e207e484ffd24ac2b5"} Sep 30 21:04:54 crc kubenswrapper[4756]: I0930 21:04:54.127381 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffec952393d7653635b7b6a662e0500454ed0b0f5c1d72e207e484ffd24ac2b5" Sep 30 21:04:54 crc kubenswrapper[4756]: I0930 21:04:54.127386 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-489a-account-create-jdh7m" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.084917 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-vtfrx"] Sep 30 21:04:56 crc kubenswrapper[4756]: E0930 21:04:56.085844 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce7b085-4d4c-4a96-a6fb-98c40eca978e" containerName="mariadb-account-create" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.085873 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce7b085-4d4c-4a96-a6fb-98c40eca978e" containerName="mariadb-account-create" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.086164 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce7b085-4d4c-4a96-a6fb-98c40eca978e" containerName="mariadb-account-create" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.087101 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.090376 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.091806 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-h5p88" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.092420 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.098053 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vtfrx"] Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.208478 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.208605 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.208900 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.209038 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.209189 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzzqb\" (UniqueName: \"kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.209248 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.310923 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzzqb\" (UniqueName: \"kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.310973 4756 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.311033 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.311098 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.311180 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.311223 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.311264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.315703 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.316985 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.319124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.320025 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " 
pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.333992 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzzqb\" (UniqueName: \"kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb\") pod \"cinder-db-sync-vtfrx\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.421933 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:04:56 crc kubenswrapper[4756]: I0930 21:04:56.919080 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vtfrx"] Sep 30 21:04:57 crc kubenswrapper[4756]: I0930 21:04:57.163252 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfrx" event={"ID":"515e601c-2353-45a9-8acc-db3f68605dd4","Type":"ContainerStarted","Data":"c1952401eb6100b8be8efb3cbfb961952978cbe555e02dac5eb895e60c122d5d"} Sep 30 21:04:58 crc kubenswrapper[4756]: I0930 21:04:58.183254 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfrx" event={"ID":"515e601c-2353-45a9-8acc-db3f68605dd4","Type":"ContainerStarted","Data":"3f42fb4d9bddb3a9bca5ae7139ec584e94997c02854d55611896e919d136f7c3"} Sep 30 21:04:58 crc kubenswrapper[4756]: I0930 21:04:58.206762 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-vtfrx" podStartSLOduration=2.206744763 podStartE2EDuration="2.206744763s" podCreationTimestamp="2025-09-30 21:04:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:04:58.202969005 +0000 UTC m=+5627.823902502" watchObservedRunningTime="2025-09-30 21:04:58.206744763 +0000 UTC m=+5627.827678250" Sep 30 21:05:00 crc kubenswrapper[4756]: I0930 21:05:00.204336 4756 generic.go:334] "Generic (PLEG): container finished" podID="515e601c-2353-45a9-8acc-db3f68605dd4" containerID="3f42fb4d9bddb3a9bca5ae7139ec584e94997c02854d55611896e919d136f7c3" exitCode=0 Sep 30 21:05:00 crc kubenswrapper[4756]: I0930 21:05:00.204517 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfrx" event={"ID":"515e601c-2353-45a9-8acc-db3f68605dd4","Type":"ContainerDied","Data":"3f42fb4d9bddb3a9bca5ae7139ec584e94997c02854d55611896e919d136f7c3"} Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.602211 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719090 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719139 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719161 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719220 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzzqb\" (UniqueName: \"kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719261 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.719354 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.720105 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts\") pod \"515e601c-2353-45a9-8acc-db3f68605dd4\" (UID: \"515e601c-2353-45a9-8acc-db3f68605dd4\") " Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.720578 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/515e601c-2353-45a9-8acc-db3f68605dd4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.726206 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.726643 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts" (OuterVolumeSpecName: "scripts") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.727257 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb" (OuterVolumeSpecName: "kube-api-access-tzzqb") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "kube-api-access-tzzqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.773154 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.792761 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data" (OuterVolumeSpecName: "config-data") pod "515e601c-2353-45a9-8acc-db3f68605dd4" (UID: "515e601c-2353-45a9-8acc-db3f68605dd4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.822653 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.822853 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.822939 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.823017 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzzqb\" (UniqueName: \"kubernetes.io/projected/515e601c-2353-45a9-8acc-db3f68605dd4-kube-api-access-tzzqb\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:01 crc kubenswrapper[4756]: I0930 21:05:01.823092 4756 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/515e601c-2353-45a9-8acc-db3f68605dd4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.233165 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfrx" event={"ID":"515e601c-2353-45a9-8acc-db3f68605dd4","Type":"ContainerDied","Data":"c1952401eb6100b8be8efb3cbfb961952978cbe555e02dac5eb895e60c122d5d"} Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.233553 4756 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="c1952401eb6100b8be8efb3cbfb961952978cbe555e02dac5eb895e60c122d5d" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.233261 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vtfrx" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.617782 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:05:02 crc kubenswrapper[4756]: E0930 21:05:02.618651 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="515e601c-2353-45a9-8acc-db3f68605dd4" containerName="cinder-db-sync" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.618667 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="515e601c-2353-45a9-8acc-db3f68605dd4" containerName="cinder-db-sync" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.618852 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="515e601c-2353-45a9-8acc-db3f68605dd4" containerName="cinder-db-sync" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.619758 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.636516 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.743342 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.743430 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.743464 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snprs\" (UniqueName: \"kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.743527 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.743571 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.813194 4756 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-api-0"] Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.823383 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.823523 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.829624 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-h5p88" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.829650 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.829731 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.830032 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.844793 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.844862 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.844893 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snprs\" (UniqueName: \"kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.844933 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.844974 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.845779 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.846261 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config\") pod 
\"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.848151 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.848597 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.865921 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snprs\" (UniqueName: \"kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs\") pod \"dnsmasq-dns-f7bc97665-ssfql\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.936829 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.946752 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.946805 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.946910 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.946963 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.946998 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.947029 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:02 crc kubenswrapper[4756]: I0930 21:05:02.947070 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvsr4\" (UniqueName: \"kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051475 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051791 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051817 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051841 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051869 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvsr4\" (UniqueName: \"kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051916 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.051933 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.052545 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.052605 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.063596 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.067511 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.069722 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.083208 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvsr4\" (UniqueName: \"kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.084506 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data\") pod \"cinder-api-0\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.137146 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.468761 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:05:03 crc kubenswrapper[4756]: I0930 21:05:03.636984 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:03 crc kubenswrapper[4756]: W0930 21:05:03.643815 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c67fc4c_7197_4099_8c81_d383c95e4840.slice/crio-9fc3de1a0073697df68d29433e8006802184140c57076c6bf06fa0d03a281dfa WatchSource:0}: Error finding container 9fc3de1a0073697df68d29433e8006802184140c57076c6bf06fa0d03a281dfa: Status 404 returned error can't find the container with id 9fc3de1a0073697df68d29433e8006802184140c57076c6bf06fa0d03a281dfa Sep 30 21:05:04 crc kubenswrapper[4756]: I0930 21:05:04.257167 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerStarted","Data":"9fc3de1a0073697df68d29433e8006802184140c57076c6bf06fa0d03a281dfa"} Sep 30 21:05:04 crc kubenswrapper[4756]: I0930 21:05:04.259054 4756 generic.go:334] "Generic (PLEG): container finished" podID="bf306b44-0f89-4344-949c-d24959e4e757" containerID="7123d9f29acfbe7bc3e46c418c6b45c32bfae6f3cf5b6b054ba979afbc73b84e" exitCode=0 Sep 30 21:05:04 crc kubenswrapper[4756]: I0930 21:05:04.259081 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" event={"ID":"bf306b44-0f89-4344-949c-d24959e4e757","Type":"ContainerDied","Data":"7123d9f29acfbe7bc3e46c418c6b45c32bfae6f3cf5b6b054ba979afbc73b84e"} Sep 30 21:05:04 crc kubenswrapper[4756]: I0930 21:05:04.259095 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" event={"ID":"bf306b44-0f89-4344-949c-d24959e4e757","Type":"ContainerStarted","Data":"0438a81493d16f9527d6dbb5f91efc704c44a7ad0c740a00ba97d4745a0bbd6e"} Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.278214 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerStarted","Data":"f44488eb42c5657c0b457fda52277ba71832398c0280b4f8cb32ddd5518e9fc2"} Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.278765 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.278777 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerStarted","Data":"b84101524b97f1eeb619092d021eb68ebea7f888ac37c8bd8a607f9dd5e8cb67"} Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.282664 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" event={"ID":"bf306b44-0f89-4344-949c-d24959e4e757","Type":"ContainerStarted","Data":"105b09f331644cb6fab8275f34bb6d8529e61eac7e68fee8646ef634682ac9df"} Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.283369 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.304187 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.30416746 
podStartE2EDuration="3.30416746s" podCreationTimestamp="2025-09-30 21:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:05.294905468 +0000 UTC m=+5634.915838955" watchObservedRunningTime="2025-09-30 21:05:05.30416746 +0000 UTC m=+5634.925100937" Sep 30 21:05:05 crc kubenswrapper[4756]: I0930 21:05:05.318306 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" podStartSLOduration=3.318290308 podStartE2EDuration="3.318290308s" podCreationTimestamp="2025-09-30 21:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:05.311181063 +0000 UTC m=+5634.932114530" watchObservedRunningTime="2025-09-30 21:05:05.318290308 +0000 UTC m=+5634.939223785" Sep 30 21:05:11 crc kubenswrapper[4756]: I0930 21:05:11.539432 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:05:11 crc kubenswrapper[4756]: I0930 21:05:11.539702 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:05:12 crc kubenswrapper[4756]: I0930 21:05:12.938683 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.036383 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.036833 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="dnsmasq-dns" containerID="cri-o://031e829e3c60bd9f44fbba3d3cb8d844ac9f8e9629aace72e128eed11e1bf516" gracePeriod=10 Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.370519 4756 generic.go:334] "Generic (PLEG): container finished" podID="8f732efe-df8f-4a0e-8586-922e1482274d" containerID="031e829e3c60bd9f44fbba3d3cb8d844ac9f8e9629aace72e128eed11e1bf516" exitCode=0 Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.370921 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" event={"ID":"8f732efe-df8f-4a0e-8586-922e1482274d","Type":"ContainerDied","Data":"031e829e3c60bd9f44fbba3d3cb8d844ac9f8e9629aace72e128eed11e1bf516"} Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.531773 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.680216 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config\") pod \"8f732efe-df8f-4a0e-8586-922e1482274d\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.680322 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmjfn\" (UniqueName: \"kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn\") pod \"8f732efe-df8f-4a0e-8586-922e1482274d\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.680413 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb\") pod \"8f732efe-df8f-4a0e-8586-922e1482274d\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.680434 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb\") pod \"8f732efe-df8f-4a0e-8586-922e1482274d\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.680518 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc\") pod \"8f732efe-df8f-4a0e-8586-922e1482274d\" (UID: \"8f732efe-df8f-4a0e-8586-922e1482274d\") " Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.698327 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn" (OuterVolumeSpecName: "kube-api-access-hmjfn") pod "8f732efe-df8f-4a0e-8586-922e1482274d" (UID: "8f732efe-df8f-4a0e-8586-922e1482274d"). InnerVolumeSpecName "kube-api-access-hmjfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.739957 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8f732efe-df8f-4a0e-8586-922e1482274d" (UID: "8f732efe-df8f-4a0e-8586-922e1482274d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.749861 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8f732efe-df8f-4a0e-8586-922e1482274d" (UID: "8f732efe-df8f-4a0e-8586-922e1482274d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.756967 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config" (OuterVolumeSpecName: "config") pod "8f732efe-df8f-4a0e-8586-922e1482274d" (UID: "8f732efe-df8f-4a0e-8586-922e1482274d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.764940 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8f732efe-df8f-4a0e-8586-922e1482274d" (UID: "8f732efe-df8f-4a0e-8586-922e1482274d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.782709 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmjfn\" (UniqueName: \"kubernetes.io/projected/8f732efe-df8f-4a0e-8586-922e1482274d-kube-api-access-hmjfn\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.782980 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.783062 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.783137 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:13 crc kubenswrapper[4756]: I0930 21:05:13.783208 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f732efe-df8f-4a0e-8586-922e1482274d-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.381567 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" event={"ID":"8f732efe-df8f-4a0e-8586-922e1482274d","Type":"ContainerDied","Data":"4abc5d4d1cfa7482ef3414b7c3e2f486d65c7e2bff747f111bfdab41e2fb9539"} Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.381605 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97b5ffb9-8ggjl" Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.381627 4756 scope.go:117] "RemoveContainer" containerID="031e829e3c60bd9f44fbba3d3cb8d844ac9f8e9629aace72e128eed11e1bf516" Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.415010 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.419534 4756 scope.go:117] "RemoveContainer" containerID="94214aec96db9b781aa3342ee63ce347ba826682a9f96d6627e9982bdab3ac15" Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.422625 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b97b5ffb9-8ggjl"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.849079 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.849729 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" containerName="nova-cell0-conductor-conductor" containerID="cri-o://71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.858892 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.859104 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" containerName="nova-scheduler-scheduler" containerID="cri-o://06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.875012 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.875302 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-log" containerID="cri-o://70a011209c6f2a0a913c6d62d3c9a352aeb1ee89b2acd5bfe315d03d908d8caa" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.875836 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-api" containerID="cri-o://424264b339bbf775ce759d28632ff896927414a684f203d7d27da866391cd17e" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.900575 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.901196 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b4f178050b95397b0936c2387eae8ce4a218128e2c5bc238ad5f93d9cdfab028" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.906933 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.907157 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" 
containerName="nova-metadata-log" containerID="cri-o://061b08b6ec14f7f2905aaadce1b0078af830a244b4544561bf4f2b4ed101c4f6" gracePeriod=30 Sep 30 21:05:14 crc kubenswrapper[4756]: I0930 21:05:14.907285 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" containerID="cri-o://b692b5eac6b2bb0100cd23ae3cb43fbc5955a114923298854458e2f195a2e630" gracePeriod=30 Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.127939 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" path="/var/lib/kubelet/pods/8f732efe-df8f-4a0e-8586-922e1482274d/volumes" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.246417 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.395811 4756 generic.go:334] "Generic (PLEG): container finished" podID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerID="061b08b6ec14f7f2905aaadce1b0078af830a244b4544561bf4f2b4ed101c4f6" exitCode=143 Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.395867 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerDied","Data":"061b08b6ec14f7f2905aaadce1b0078af830a244b4544561bf4f2b4ed101c4f6"} Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.398495 4756 generic.go:334] "Generic (PLEG): container finished" podID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" containerID="b4f178050b95397b0936c2387eae8ce4a218128e2c5bc238ad5f93d9cdfab028" exitCode=0 Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.398546 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"af618cfb-1960-4d7b-a513-48dcbe0a7ddd","Type":"ContainerDied","Data":"b4f178050b95397b0936c2387eae8ce4a218128e2c5bc238ad5f93d9cdfab028"} Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.400587 4756 generic.go:334] "Generic (PLEG): container finished" podID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerID="70a011209c6f2a0a913c6d62d3c9a352aeb1ee89b2acd5bfe315d03d908d8caa" exitCode=143 Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.400628 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerDied","Data":"70a011209c6f2a0a913c6d62d3c9a352aeb1ee89b2acd5bfe315d03d908d8caa"} Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.644438 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.715751 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data\") pod \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.715781 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle\") pod \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.715915 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgdxt\" (UniqueName: \"kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt\") pod \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\" (UID: \"af618cfb-1960-4d7b-a513-48dcbe0a7ddd\") " Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.730155 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt" (OuterVolumeSpecName: "kube-api-access-fgdxt") pod "af618cfb-1960-4d7b-a513-48dcbe0a7ddd" (UID: "af618cfb-1960-4d7b-a513-48dcbe0a7ddd"). InnerVolumeSpecName "kube-api-access-fgdxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.743002 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data" (OuterVolumeSpecName: "config-data") pod "af618cfb-1960-4d7b-a513-48dcbe0a7ddd" (UID: "af618cfb-1960-4d7b-a513-48dcbe0a7ddd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.764127 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af618cfb-1960-4d7b-a513-48dcbe0a7ddd" (UID: "af618cfb-1960-4d7b-a513-48dcbe0a7ddd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.817946 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.817973 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:15 crc kubenswrapper[4756]: I0930 21:05:15.817983 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgdxt\" (UniqueName: \"kubernetes.io/projected/af618cfb-1960-4d7b-a513-48dcbe0a7ddd-kube-api-access-fgdxt\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.051422 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.121765 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle\") pod \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.122028 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swhkc\" (UniqueName: \"kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc\") pod \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.122084 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data\") pod \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\" (UID: \"2b1a012e-bb74-4006-b959-4a8a1ded3f5e\") " Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.126510 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc" (OuterVolumeSpecName: "kube-api-access-swhkc") pod "2b1a012e-bb74-4006-b959-4a8a1ded3f5e" (UID: "2b1a012e-bb74-4006-b959-4a8a1ded3f5e"). InnerVolumeSpecName "kube-api-access-swhkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.150775 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b1a012e-bb74-4006-b959-4a8a1ded3f5e" (UID: "2b1a012e-bb74-4006-b959-4a8a1ded3f5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.161036 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data" (OuterVolumeSpecName: "config-data") pod "2b1a012e-bb74-4006-b959-4a8a1ded3f5e" (UID: "2b1a012e-bb74-4006-b959-4a8a1ded3f5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.225147 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.225184 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swhkc\" (UniqueName: \"kubernetes.io/projected/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-kube-api-access-swhkc\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.225198 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1a012e-bb74-4006-b959-4a8a1ded3f5e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.409381 4756 generic.go:334] "Generic (PLEG): container finished" podID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" containerID="06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1" exitCode=0 Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.409480 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.409507 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b1a012e-bb74-4006-b959-4a8a1ded3f5e","Type":"ContainerDied","Data":"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1"} Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.410557 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b1a012e-bb74-4006-b959-4a8a1ded3f5e","Type":"ContainerDied","Data":"856a884015c33a2c2f1204414bc8d9b40af57895ea1e2b6c1c0976501e2f26aa"} Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.410582 4756 scope.go:117] "RemoveContainer" containerID="06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.412487 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"af618cfb-1960-4d7b-a513-48dcbe0a7ddd","Type":"ContainerDied","Data":"b48077e2640e1f83ad61ae391cf5c1a6496ff81cfb758649484098c6e3f34fd6"} Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.412614 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.443796 4756 scope.go:117] "RemoveContainer" containerID="06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1" Sep 30 21:05:16 crc kubenswrapper[4756]: E0930 21:05:16.444437 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1\": container with ID starting with 06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1 not found: ID does not exist" containerID="06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.444572 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1"} err="failed to get container status \"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1\": rpc error: code = NotFound desc = could not find container \"06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1\": container with ID starting with 06c5ebf5a85ecc4680f25768a91c769bf158e437b05d3a14869c9ce79270cdd1 not found: ID does not exist" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.444673 4756 scope.go:117] "RemoveContainer" containerID="b4f178050b95397b0936c2387eae8ce4a218128e2c5bc238ad5f93d9cdfab028" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.444860 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.457428 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.468258 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.481552 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.511558 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: E0930 21:05:16.512151 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" containerName="nova-scheduler-scheduler" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.512189 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" containerName="nova-scheduler-scheduler" Sep 30 21:05:16 crc kubenswrapper[4756]: E0930 21:05:16.512231 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.512247 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 21:05:16 crc kubenswrapper[4756]: E0930 21:05:16.512269 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="init" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.512281 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="init" Sep 30 21:05:16 crc kubenswrapper[4756]: E0930 21:05:16.512323 4756 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="dnsmasq-dns" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.512335 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="dnsmasq-dns" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.513097 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.513162 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f732efe-df8f-4a0e-8586-922e1482274d" containerName="dnsmasq-dns" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.513190 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" containerName="nova-scheduler-scheduler" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.514370 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.523634 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.531066 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.533559 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.546103 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.546269 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.560983 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.634965 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.635071 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.635098 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmqtw\" (UniqueName: \"kubernetes.io/projected/cd2a9763-32c6-4c38-8565-42dea0aff93c-kube-api-access-fmqtw\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.635165 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-config-data\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.635270 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.635485 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqch6\" (UniqueName: \"kubernetes.io/projected/a23cdb4e-60a7-4298-824c-29f6ccf5d502-kube-api-access-sqch6\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736580 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736677 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqch6\" (UniqueName: \"kubernetes.io/projected/a23cdb4e-60a7-4298-824c-29f6ccf5d502-kube-api-access-sqch6\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736731 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736786 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmqtw\" (UniqueName: \"kubernetes.io/projected/cd2a9763-32c6-4c38-8565-42dea0aff93c-kube-api-access-fmqtw\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.736839 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-config-data\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.741055 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.741289 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a23cdb4e-60a7-4298-824c-29f6ccf5d502-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.742156 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-config-data\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.742361 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2a9763-32c6-4c38-8565-42dea0aff93c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.770160 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmqtw\" (UniqueName: \"kubernetes.io/projected/cd2a9763-32c6-4c38-8565-42dea0aff93c-kube-api-access-fmqtw\") pod \"nova-scheduler-0\" (UID: \"cd2a9763-32c6-4c38-8565-42dea0aff93c\") " pod="openstack/nova-scheduler-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.778180 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqch6\" (UniqueName: \"kubernetes.io/projected/a23cdb4e-60a7-4298-824c-29f6ccf5d502-kube-api-access-sqch6\") pod \"nova-cell1-novncproxy-0\" (UID: \"a23cdb4e-60a7-4298-824c-29f6ccf5d502\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.837076 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:16 crc kubenswrapper[4756]: I0930 21:05:16.857554 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.142741 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b1a012e-bb74-4006-b959-4a8a1ded3f5e" path="/var/lib/kubelet/pods/2b1a012e-bb74-4006-b959-4a8a1ded3f5e/volumes" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.143988 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af618cfb-1960-4d7b-a513-48dcbe0a7ddd" path="/var/lib/kubelet/pods/af618cfb-1960-4d7b-a513-48dcbe0a7ddd/volumes" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.189638 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.250180 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf7sq\" (UniqueName: \"kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq\") pod \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.250242 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle\") pod \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.250310 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data\") pod \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\" (UID: \"ca6b90e2-e28e-41e7-8029-58917d24e5c1\") " Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.257410 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq" (OuterVolumeSpecName: "kube-api-access-pf7sq") pod "ca6b90e2-e28e-41e7-8029-58917d24e5c1" (UID: "ca6b90e2-e28e-41e7-8029-58917d24e5c1"). InnerVolumeSpecName "kube-api-access-pf7sq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.282341 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data" (OuterVolumeSpecName: "config-data") pod "ca6b90e2-e28e-41e7-8029-58917d24e5c1" (UID: "ca6b90e2-e28e-41e7-8029-58917d24e5c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.289592 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca6b90e2-e28e-41e7-8029-58917d24e5c1" (UID: "ca6b90e2-e28e-41e7-8029-58917d24e5c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.353414 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf7sq\" (UniqueName: \"kubernetes.io/projected/ca6b90e2-e28e-41e7-8029-58917d24e5c1-kube-api-access-pf7sq\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.353718 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.353730 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b90e2-e28e-41e7-8029-58917d24e5c1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.416147 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.423666 4756 generic.go:334] "Generic (PLEG): container finished" podID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" containerID="71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff" exitCode=0 Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.423712 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.423759 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ca6b90e2-e28e-41e7-8029-58917d24e5c1","Type":"ContainerDied","Data":"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff"} Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.423798 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ca6b90e2-e28e-41e7-8029-58917d24e5c1","Type":"ContainerDied","Data":"3eeb074521fb16396db4828f04ed80c50e115fbbb0c012eeb58829d420e1a736"} Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.423817 4756 scope.go:117] "RemoveContainer" containerID="71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff" Sep 30 21:05:17 crc kubenswrapper[4756]: W0930 21:05:17.424021 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda23cdb4e_60a7_4298_824c_29f6ccf5d502.slice/crio-0661243d85b9b19ed2cf9d027ff101b32059456451f1228081e82f3f9a44c877 WatchSource:0}: Error finding container 0661243d85b9b19ed2cf9d027ff101b32059456451f1228081e82f3f9a44c877: Status 404 returned error can't find the container with id 0661243d85b9b19ed2cf9d027ff101b32059456451f1228081e82f3f9a44c877 Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.426459 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: W0930 21:05:17.427736 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd2a9763_32c6_4c38_8565_42dea0aff93c.slice/crio-15ef33fca60c18805bcc8cd96fb5d97001efb1ae0986a004670c3de88bd3ed57 WatchSource:0}: Error finding container 15ef33fca60c18805bcc8cd96fb5d97001efb1ae0986a004670c3de88bd3ed57: Status 404 returned error can't find the container with id 15ef33fca60c18805bcc8cd96fb5d97001efb1ae0986a004670c3de88bd3ed57 Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.510359 
4756 scope.go:117] "RemoveContainer" containerID="71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff" Sep 30 21:05:17 crc kubenswrapper[4756]: E0930 21:05:17.513015 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff\": container with ID starting with 71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff not found: ID does not exist" containerID="71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.513054 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff"} err="failed to get container status \"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff\": rpc error: code = NotFound desc = could not find container \"71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff\": container with ID starting with 71e64e2544e4f16deb6082288d1fb345e335e6c25f7b3ccdb401549c129339ff not found: ID does not exist" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.630019 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.650214 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.657176 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: E0930 21:05:17.657597 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" containerName="nova-cell0-conductor-conductor" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.657662 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" containerName="nova-cell0-conductor-conductor" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.657930 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" containerName="nova-cell0-conductor-conductor" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.658797 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.664951 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.667977 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.759293 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.759360 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.759505 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpgz8\" (UniqueName: \"kubernetes.io/projected/26ca4516-71bc-4d89-8008-feac31530eba-kube-api-access-qpgz8\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.861178 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.861259 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.861424 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpgz8\" (UniqueName: \"kubernetes.io/projected/26ca4516-71bc-4d89-8008-feac31530eba-kube-api-access-qpgz8\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.867054 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.868998 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4516-71bc-4d89-8008-feac31530eba-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.889913 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpgz8\" (UniqueName: \"kubernetes.io/projected/26ca4516-71bc-4d89-8008-feac31530eba-kube-api-access-qpgz8\") pod \"nova-cell0-conductor-0\" (UID: \"26ca4516-71bc-4d89-8008-feac31530eba\") " pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:17 crc kubenswrapper[4756]: I0930 21:05:17.985417 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.089486 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.089914 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": read tcp 10.217.0.2:36900->10.217.1.75:8775: read: connection reset by peer" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.089963 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="cecb74a6-d43b-46e2-b423-628303002ae3" containerName="nova-cell1-conductor-conductor" containerID="cri-o://0fdcb8d72e9fe20acbee104323b320b1e5934321e2519011362bb10d0b1fa117" gracePeriod=30 Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.089964 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.75:8775/\": read tcp 10.217.0.2:36916->10.217.1.75:8775: read: connection reset by peer" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.444343 4756 generic.go:334] "Generic (PLEG): container finished" podID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerID="424264b339bbf775ce759d28632ff896927414a684f203d7d27da866391cd17e" exitCode=0 Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.444420 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerDied","Data":"424264b339bbf775ce759d28632ff896927414a684f203d7d27da866391cd17e"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.459864 4756 generic.go:334] "Generic (PLEG): container finished" podID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerID="b692b5eac6b2bb0100cd23ae3cb43fbc5955a114923298854458e2f195a2e630" exitCode=0 Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.459960 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerDied","Data":"b692b5eac6b2bb0100cd23ae3cb43fbc5955a114923298854458e2f195a2e630"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.479037 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cd2a9763-32c6-4c38-8565-42dea0aff93c","Type":"ContainerStarted","Data":"5b31ab9867065a983dd575cd3453772718ba9b7758fad0e147efad0b37ef5d55"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.479090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cd2a9763-32c6-4c38-8565-42dea0aff93c","Type":"ContainerStarted","Data":"15ef33fca60c18805bcc8cd96fb5d97001efb1ae0986a004670c3de88bd3ed57"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.490309 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a23cdb4e-60a7-4298-824c-29f6ccf5d502","Type":"ContainerStarted","Data":"4447a76f5aeb365dc158d0a9d62f03100d52f18fd7b3c7f82353ad21f658f4aa"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.490383 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a23cdb4e-60a7-4298-824c-29f6ccf5d502","Type":"ContainerStarted","Data":"0661243d85b9b19ed2cf9d027ff101b32059456451f1228081e82f3f9a44c877"} Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.500058 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.500042157 podStartE2EDuration="2.500042157s" podCreationTimestamp="2025-09-30 21:05:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:18.498046405 +0000 UTC m=+5648.118979882" watchObservedRunningTime="2025-09-30 21:05:18.500042157 +0000 UTC m=+5648.120975634" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.527039 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.52701709 podStartE2EDuration="2.52701709s" podCreationTimestamp="2025-09-30 21:05:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:18.513839487 +0000 UTC m=+5648.134772964" watchObservedRunningTime="2025-09-30 21:05:18.52701709 +0000 UTC m=+5648.147950567" Sep 30 21:05:18 crc kubenswrapper[4756]: W0930 21:05:18.543460 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26ca4516_71bc_4d89_8008_feac31530eba.slice/crio-a17371e121e10d96a0950a7455c615f37fc3f5ee72e556e0c4926cd580544408 WatchSource:0}: Error finding container a17371e121e10d96a0950a7455c615f37fc3f5ee72e556e0c4926cd580544408: Status 404 returned error can't find the container with id a17371e121e10d96a0950a7455c615f37fc3f5ee72e556e0c4926cd580544408 Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.544086 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.681333 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.695094 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778486 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle\") pod \"fc3620f4-f8ca-4742-a1a4-18810a786afc\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778551 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pxlb\" (UniqueName: \"kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb\") pod \"fc3620f4-f8ca-4742-a1a4-18810a786afc\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778587 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs\") pod \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778614 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle\") pod \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778639 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxzbp\" (UniqueName: \"kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp\") pod \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778703 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data\") pod \"fc3620f4-f8ca-4742-a1a4-18810a786afc\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778745 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data\") pod \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\" (UID: \"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.778768 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs\") pod \"fc3620f4-f8ca-4742-a1a4-18810a786afc\" (UID: \"fc3620f4-f8ca-4742-a1a4-18810a786afc\") " Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.782832 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs" (OuterVolumeSpecName: "logs") pod "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" (UID: "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.784866 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs" (OuterVolumeSpecName: "logs") pod "fc3620f4-f8ca-4742-a1a4-18810a786afc" (UID: "fc3620f4-f8ca-4742-a1a4-18810a786afc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.788953 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp" (OuterVolumeSpecName: "kube-api-access-pxzbp") pod "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" (UID: "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632"). InnerVolumeSpecName "kube-api-access-pxzbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.790579 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb" (OuterVolumeSpecName: "kube-api-access-5pxlb") pod "fc3620f4-f8ca-4742-a1a4-18810a786afc" (UID: "fc3620f4-f8ca-4742-a1a4-18810a786afc"). InnerVolumeSpecName "kube-api-access-5pxlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.818222 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data" (OuterVolumeSpecName: "config-data") pod "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" (UID: "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.818236 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" (UID: "3ef7ebf2-0e17-4d34-b8c2-3c99c084a632"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.825647 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data" (OuterVolumeSpecName: "config-data") pod "fc3620f4-f8ca-4742-a1a4-18810a786afc" (UID: "fc3620f4-f8ca-4742-a1a4-18810a786afc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.829750 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc3620f4-f8ca-4742-a1a4-18810a786afc" (UID: "fc3620f4-f8ca-4742-a1a4-18810a786afc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880346 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880380 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880406 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc3620f4-f8ca-4742-a1a4-18810a786afc-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880415 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc3620f4-f8ca-4742-a1a4-18810a786afc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880427 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pxlb\" (UniqueName: \"kubernetes.io/projected/fc3620f4-f8ca-4742-a1a4-18810a786afc-kube-api-access-5pxlb\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880437 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880445 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:18 crc kubenswrapper[4756]: I0930 21:05:18.880454 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxzbp\" (UniqueName: \"kubernetes.io/projected/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632-kube-api-access-pxzbp\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.137900 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca6b90e2-e28e-41e7-8029-58917d24e5c1" path="/var/lib/kubelet/pods/ca6b90e2-e28e-41e7-8029-58917d24e5c1/volumes" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.500841 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ef7ebf2-0e17-4d34-b8c2-3c99c084a632","Type":"ContainerDied","Data":"f2c3ea7a817fe53c7fb393ae348d6b0512140caca2d1e5b3caf8c40cc8029678"} Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.500896 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.500903 4756 scope.go:117] "RemoveContainer" containerID="424264b339bbf775ce759d28632ff896927414a684f203d7d27da866391cd17e" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.506553 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"26ca4516-71bc-4d89-8008-feac31530eba","Type":"ContainerStarted","Data":"5e1a59aa52160f9fabf84565812d6648cdd2b1da3b06da70e4f53ff267c99e61"} Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.506602 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"26ca4516-71bc-4d89-8008-feac31530eba","Type":"ContainerStarted","Data":"a17371e121e10d96a0950a7455c615f37fc3f5ee72e556e0c4926cd580544408"} Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.506726 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.509439 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc3620f4-f8ca-4742-a1a4-18810a786afc","Type":"ContainerDied","Data":"3a32dc5001a5b071b5e4a58e38cabb6594be9a24cf39620c7fb76e189e5138b8"} Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.509502 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.536686 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.536637814 podStartE2EDuration="2.536637814s" podCreationTimestamp="2025-09-30 21:05:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:19.526667364 +0000 UTC m=+5649.147600841" watchObservedRunningTime="2025-09-30 21:05:19.536637814 +0000 UTC m=+5649.157571321" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.543105 4756 scope.go:117] "RemoveContainer" containerID="70a011209c6f2a0a913c6d62d3c9a352aeb1ee89b2acd5bfe315d03d908d8caa" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.568901 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.597683 4756 scope.go:117] "RemoveContainer" containerID="b692b5eac6b2bb0100cd23ae3cb43fbc5955a114923298854458e2f195a2e630" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.618604 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.640977 4756 scope.go:117] "RemoveContainer" containerID="061b08b6ec14f7f2905aaadce1b0078af830a244b4544561bf4f2b4ed101c4f6" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.661660 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.670071 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.676818 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: E0930 21:05:19.677454 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-api" Sep 30 21:05:19 
crc kubenswrapper[4756]: I0930 21:05:19.677469 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-api" Sep 30 21:05:19 crc kubenswrapper[4756]: E0930 21:05:19.677487 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-log" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677493 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-log" Sep 30 21:05:19 crc kubenswrapper[4756]: E0930 21:05:19.677506 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-log" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677513 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-log" Sep 30 21:05:19 crc kubenswrapper[4756]: E0930 21:05:19.677526 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677533 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677760 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-metadata" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677773 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-log" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677784 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" containerName="nova-api-api" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.677796 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" containerName="nova-metadata-log" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.678913 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.682216 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.684229 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.686667 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.692099 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.692625 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.701654 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.802687 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-config-data\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.803649 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e750932-dd9c-4dbc-b42b-b8c17481e118-logs\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.806579 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krnft\" (UniqueName: \"kubernetes.io/projected/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-kube-api-access-krnft\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.806667 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-config-data\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.806910 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.806977 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.807267 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-logs\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.807356 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpr5n\" (UniqueName: \"kubernetes.io/projected/6e750932-dd9c-4dbc-b42b-b8c17481e118-kube-api-access-jpr5n\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc 
kubenswrapper[4756]: I0930 21:05:19.908440 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.908607 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.908659 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-logs\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.908680 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpr5n\" (UniqueName: \"kubernetes.io/projected/6e750932-dd9c-4dbc-b42b-b8c17481e118-kube-api-access-jpr5n\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.908700 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-config-data\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.909355 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e750932-dd9c-4dbc-b42b-b8c17481e118-logs\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.909358 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-logs\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.909413 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krnft\" (UniqueName: \"kubernetes.io/projected/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-kube-api-access-krnft\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.909445 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-config-data\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.909758 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e750932-dd9c-4dbc-b42b-b8c17481e118-logs\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.914564 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.921019 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e750932-dd9c-4dbc-b42b-b8c17481e118-config-data\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.927200 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-config-data\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.929896 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpr5n\" (UniqueName: \"kubernetes.io/projected/6e750932-dd9c-4dbc-b42b-b8c17481e118-kube-api-access-jpr5n\") pod \"nova-api-0\" (UID: \"6e750932-dd9c-4dbc-b42b-b8c17481e118\") " pod="openstack/nova-api-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.930716 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:19 crc kubenswrapper[4756]: I0930 21:05:19.948950 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krnft\" (UniqueName: \"kubernetes.io/projected/7fa0d9a9-bf84-42ba-980e-0ca8010de3e7-kube-api-access-krnft\") pod \"nova-metadata-0\" (UID: \"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7\") " pod="openstack/nova-metadata-0" Sep 30 21:05:20 crc kubenswrapper[4756]: I0930 21:05:20.004957 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 21:05:20 crc kubenswrapper[4756]: I0930 21:05:20.013009 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 21:05:20 crc kubenswrapper[4756]: I0930 21:05:20.510900 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 21:05:20 crc kubenswrapper[4756]: I0930 21:05:20.608237 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 21:05:20 crc kubenswrapper[4756]: W0930 21:05:20.608889 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fa0d9a9_bf84_42ba_980e_0ca8010de3e7.slice/crio-43f4c06a5f326959a8ade394674618e284cca9ade61cc59b0d038becdfe0b332 WatchSource:0}: Error finding container 43f4c06a5f326959a8ade394674618e284cca9ade61cc59b0d038becdfe0b332: Status 404 returned error can't find the container with id 43f4c06a5f326959a8ade394674618e284cca9ade61cc59b0d038becdfe0b332 Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.126465 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ef7ebf2-0e17-4d34-b8c2-3c99c084a632" path="/var/lib/kubelet/pods/3ef7ebf2-0e17-4d34-b8c2-3c99c084a632/volumes" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.128361 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc3620f4-f8ca-4742-a1a4-18810a786afc" path="/var/lib/kubelet/pods/fc3620f4-f8ca-4742-a1a4-18810a786afc/volumes" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.543122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6e750932-dd9c-4dbc-b42b-b8c17481e118","Type":"ContainerStarted","Data":"881f6a50ed7a988b44691b0efa35d98922c0b765dab34c5ea049f5c8d742bb60"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.544691 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6e750932-dd9c-4dbc-b42b-b8c17481e118","Type":"ContainerStarted","Data":"90afe3069f1d5dc7a4dec747534cda85116a56085cb9aee8cf6649c0945ff9a1"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.544808 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6e750932-dd9c-4dbc-b42b-b8c17481e118","Type":"ContainerStarted","Data":"add0c5fb511b3a71cea21467fde45b30fba6d5630615423ebc1dd4296fcdffce"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.545776 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7","Type":"ContainerStarted","Data":"49441f21aa260eeb08f2e14258b1dbbbd1360618c1d9b72e543eee109660a4bc"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.545894 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7","Type":"ContainerStarted","Data":"5fbefdf511d9bd4ced70fe54c0f239a1c8ce0e97bbe5c46c0440819945b4f0a0"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.546015 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fa0d9a9-bf84-42ba-980e-0ca8010de3e7","Type":"ContainerStarted","Data":"43f4c06a5f326959a8ade394674618e284cca9ade61cc59b0d038becdfe0b332"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.549673 4756 generic.go:334] "Generic (PLEG): container finished" podID="cecb74a6-d43b-46e2-b423-628303002ae3" containerID="0fdcb8d72e9fe20acbee104323b320b1e5934321e2519011362bb10d0b1fa117" exitCode=0 Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.549728 4756 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cecb74a6-d43b-46e2-b423-628303002ae3","Type":"ContainerDied","Data":"0fdcb8d72e9fe20acbee104323b320b1e5934321e2519011362bb10d0b1fa117"} Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.562300 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.562282936 podStartE2EDuration="2.562282936s" podCreationTimestamp="2025-09-30 21:05:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:21.559263788 +0000 UTC m=+5651.180197265" watchObservedRunningTime="2025-09-30 21:05:21.562282936 +0000 UTC m=+5651.183216413" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.601756 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.601739176 podStartE2EDuration="2.601739176s" podCreationTimestamp="2025-09-30 21:05:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:21.588754627 +0000 UTC m=+5651.209688124" watchObservedRunningTime="2025-09-30 21:05:21.601739176 +0000 UTC m=+5651.222672653" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.837796 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.860467 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.884277 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.948798 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p77qn\" (UniqueName: \"kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn\") pod \"cecb74a6-d43b-46e2-b423-628303002ae3\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.948941 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle\") pod \"cecb74a6-d43b-46e2-b423-628303002ae3\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.949065 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data\") pod \"cecb74a6-d43b-46e2-b423-628303002ae3\" (UID: \"cecb74a6-d43b-46e2-b423-628303002ae3\") " Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.964655 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn" (OuterVolumeSpecName: "kube-api-access-p77qn") pod "cecb74a6-d43b-46e2-b423-628303002ae3" (UID: "cecb74a6-d43b-46e2-b423-628303002ae3"). InnerVolumeSpecName "kube-api-access-p77qn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.981517 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data" (OuterVolumeSpecName: "config-data") pod "cecb74a6-d43b-46e2-b423-628303002ae3" (UID: "cecb74a6-d43b-46e2-b423-628303002ae3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:21 crc kubenswrapper[4756]: I0930 21:05:21.983267 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cecb74a6-d43b-46e2-b423-628303002ae3" (UID: "cecb74a6-d43b-46e2-b423-628303002ae3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.051966 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p77qn\" (UniqueName: \"kubernetes.io/projected/cecb74a6-d43b-46e2-b423-628303002ae3-kube-api-access-p77qn\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.052256 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.052271 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecb74a6-d43b-46e2-b423-628303002ae3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.561359 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cecb74a6-d43b-46e2-b423-628303002ae3","Type":"ContainerDied","Data":"36f42cda1b6ab56df4091d997f0976bb6255a0984cce114c99eb09fea90feeef"} Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.561512 4756 scope.go:117] "RemoveContainer" containerID="0fdcb8d72e9fe20acbee104323b320b1e5934321e2519011362bb10d0b1fa117" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.561660 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.634346 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.644504 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.664495 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:22 crc kubenswrapper[4756]: E0930 21:05:22.664994 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cecb74a6-d43b-46e2-b423-628303002ae3" containerName="nova-cell1-conductor-conductor" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.665018 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cecb74a6-d43b-46e2-b423-628303002ae3" containerName="nova-cell1-conductor-conductor" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.665217 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cecb74a6-d43b-46e2-b423-628303002ae3" containerName="nova-cell1-conductor-conductor" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.666017 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.667936 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.670632 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.771373 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.771464 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kmrd\" (UniqueName: \"kubernetes.io/projected/6c2c0ab7-db88-4062-808b-9db9cb1f2264-kube-api-access-7kmrd\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.771578 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.876540 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.876636 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-config-data\") pod \"nova-cell1-conductor-0\" 
(UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.876681 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kmrd\" (UniqueName: \"kubernetes.io/projected/6c2c0ab7-db88-4062-808b-9db9cb1f2264-kube-api-access-7kmrd\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.881948 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.882781 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c2c0ab7-db88-4062-808b-9db9cb1f2264-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.900105 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kmrd\" (UniqueName: \"kubernetes.io/projected/6c2c0ab7-db88-4062-808b-9db9cb1f2264-kube-api-access-7kmrd\") pod \"nova-cell1-conductor-0\" (UID: \"6c2c0ab7-db88-4062-808b-9db9cb1f2264\") " pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:22 crc kubenswrapper[4756]: I0930 21:05:22.984241 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:23 crc kubenswrapper[4756]: I0930 21:05:23.139233 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cecb74a6-d43b-46e2-b423-628303002ae3" path="/var/lib/kubelet/pods/cecb74a6-d43b-46e2-b423-628303002ae3/volumes" Sep 30 21:05:23 crc kubenswrapper[4756]: I0930 21:05:23.470104 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 21:05:23 crc kubenswrapper[4756]: W0930 21:05:23.480580 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c2c0ab7_db88_4062_808b_9db9cb1f2264.slice/crio-9e050f186c54f7310f3b65d6a408b9ce0cbef1ed6f8c2a1b0c986e5db6d63971 WatchSource:0}: Error finding container 9e050f186c54f7310f3b65d6a408b9ce0cbef1ed6f8c2a1b0c986e5db6d63971: Status 404 returned error can't find the container with id 9e050f186c54f7310f3b65d6a408b9ce0cbef1ed6f8c2a1b0c986e5db6d63971 Sep 30 21:05:23 crc kubenswrapper[4756]: I0930 21:05:23.580684 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6c2c0ab7-db88-4062-808b-9db9cb1f2264","Type":"ContainerStarted","Data":"9e050f186c54f7310f3b65d6a408b9ce0cbef1ed6f8c2a1b0c986e5db6d63971"} Sep 30 21:05:24 crc kubenswrapper[4756]: I0930 21:05:24.594278 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6c2c0ab7-db88-4062-808b-9db9cb1f2264","Type":"ContainerStarted","Data":"ab8f15771b9000eb1e37bc9575fb5224d7772ef6666bd5471e3b9498b88d6eea"} Sep 30 21:05:24 crc kubenswrapper[4756]: I0930 21:05:24.594801 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:24 crc kubenswrapper[4756]: 
I0930 21:05:24.614862 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.614836754 podStartE2EDuration="2.614836754s" podCreationTimestamp="2025-09-30 21:05:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:24.609493094 +0000 UTC m=+5654.230426601" watchObservedRunningTime="2025-09-30 21:05:24.614836754 +0000 UTC m=+5654.235770261" Sep 30 21:05:25 crc kubenswrapper[4756]: I0930 21:05:25.014060 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:05:25 crc kubenswrapper[4756]: I0930 21:05:25.014184 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 21:05:26 crc kubenswrapper[4756]: I0930 21:05:26.837835 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:26 crc kubenswrapper[4756]: I0930 21:05:26.848862 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:26 crc kubenswrapper[4756]: I0930 21:05:26.858712 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 21:05:26 crc kubenswrapper[4756]: I0930 21:05:26.888758 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 21:05:27 crc kubenswrapper[4756]: I0930 21:05:27.633625 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 21:05:27 crc kubenswrapper[4756]: I0930 21:05:27.685531 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 21:05:28 crc kubenswrapper[4756]: I0930 21:05:28.018622 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 21:05:30 crc kubenswrapper[4756]: I0930 21:05:30.006025 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 21:05:30 crc kubenswrapper[4756]: I0930 21:05:30.006418 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 21:05:30 crc kubenswrapper[4756]: I0930 21:05:30.014672 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 21:05:30 crc kubenswrapper[4756]: I0930 21:05:30.014816 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 21:05:31 crc kubenswrapper[4756]: I0930 21:05:31.173606 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fa0d9a9-bf84-42ba-980e-0ca8010de3e7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:05:31 crc kubenswrapper[4756]: I0930 21:05:31.173835 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6e750932-dd9c-4dbc-b42b-b8c17481e118" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.87:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:05:31 crc kubenswrapper[4756]: I0930 21:05:31.174065 4756 
prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6e750932-dd9c-4dbc-b42b-b8c17481e118" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.87:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:05:31 crc kubenswrapper[4756]: I0930 21:05:31.174094 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fa0d9a9-bf84-42ba-980e-0ca8010de3e7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 21:05:33 crc kubenswrapper[4756]: I0930 21:05:33.022633 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 21:05:34 crc kubenswrapper[4756]: I0930 21:05:34.930726 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:34 crc kubenswrapper[4756]: I0930 21:05:34.933611 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:34 crc kubenswrapper[4756]: I0930 21:05:34.937528 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 21:05:34 crc kubenswrapper[4756]: I0930 21:05:34.947048 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112138 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112249 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112327 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112391 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112446 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.112494 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzfg2\" (UniqueName: \"kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.214764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.214828 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.214889 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.214926 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.214975 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzfg2\" (UniqueName: \"kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.215067 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.215825 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.220802 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.221502 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") 
" pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.223304 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.228069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.234482 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzfg2\" (UniqueName: \"kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2\") pod \"cinder-scheduler-0\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.274476 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:35 crc kubenswrapper[4756]: W0930 21:05:35.753212 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8456c4c_7259_4e33_aa46_961db6c7b617.slice/crio-91de67c6b513fe90a5bbc2cd560441dffa3c4c0a8829a5a528e52897563f445c WatchSource:0}: Error finding container 91de67c6b513fe90a5bbc2cd560441dffa3c4c0a8829a5a528e52897563f445c: Status 404 returned error can't find the container with id 91de67c6b513fe90a5bbc2cd560441dffa3c4c0a8829a5a528e52897563f445c Sep 30 21:05:35 crc kubenswrapper[4756]: I0930 21:05:35.755895 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.335495 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.336267 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api-log" containerID="cri-o://b84101524b97f1eeb619092d021eb68ebea7f888ac37c8bd8a607f9dd5e8cb67" gracePeriod=30 Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.336737 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api" containerID="cri-o://f44488eb42c5657c0b457fda52277ba71832398c0280b4f8cb32ddd5518e9fc2" gracePeriod=30 Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.728555 4756 generic.go:334] "Generic (PLEG): container finished" podID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerID="b84101524b97f1eeb619092d021eb68ebea7f888ac37c8bd8a607f9dd5e8cb67" exitCode=143 Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.728646 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerDied","Data":"b84101524b97f1eeb619092d021eb68ebea7f888ac37c8bd8a607f9dd5e8cb67"} Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.732199 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerStarted","Data":"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7"} Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.732230 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerStarted","Data":"91de67c6b513fe90a5bbc2cd560441dffa3c4c0a8829a5a528e52897563f445c"} Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.958849 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.960448 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.964759 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Sep 30 21:05:36 crc kubenswrapper[4756]: I0930 21:05:36.972561 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.172610 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.173021 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.173052 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-run\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.174991 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175084 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175212 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175371 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175432 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175457 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-sys\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175490 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175552 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175619 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175650 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175676 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-dev\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175708 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmbgv\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-kube-api-access-bmbgv\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.175739 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277513 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277584 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-sys\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277631 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277689 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277735 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277773 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277809 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-dev\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277847 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmbgv\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-kube-api-access-bmbgv\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277881 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-lib-cinder\") pod 
\"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.277934 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278008 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278057 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-run\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278187 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278263 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278355 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278473 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278616 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-dev\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278629 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278739 4756 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278935 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.278972 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.279050 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.279056 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.279070 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-run\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.279083 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.279124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/10f935ba-d208-4e87-ac0f-e80fbb14c586-sys\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.285649 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.288203 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc 
kubenswrapper[4756]: I0930 21:05:37.294894 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.298544 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.304197 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmbgv\" (UniqueName: \"kubernetes.io/projected/10f935ba-d208-4e87-ac0f-e80fbb14c586-kube-api-access-bmbgv\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.308793 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10f935ba-d208-4e87-ac0f-e80fbb14c586-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"10f935ba-d208-4e87-ac0f-e80fbb14c586\") " pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.575793 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.626050 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.628627 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.631772 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.660058 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.754683 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerStarted","Data":"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548"} Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.783783 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.783765959 podStartE2EDuration="3.783765959s" podCreationTimestamp="2025-09-30 21:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:37.781727016 +0000 UTC m=+5667.402660503" watchObservedRunningTime="2025-09-30 21:05:37.783765959 +0000 UTC m=+5667.404699436" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.785960 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786006 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786031 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-scripts\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786047 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786073 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786087 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-run\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786109 4756 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786124 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-lib-modules\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786149 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786173 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-ceph\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786199 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8647\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-kube-api-access-g8647\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786219 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786235 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-sys\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786284 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786429 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-dev\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.786445 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: 
\"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.887941 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-scripts\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.887986 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888019 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888038 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-run\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888074 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888097 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-lib-modules\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888131 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888160 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-ceph\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888194 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8647\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-kube-api-access-g8647\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888231 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888267 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888284 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-lib-modules\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888293 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-sys\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888312 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-run\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888352 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888413 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888467 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-sys\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888499 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-dev\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888523 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888524 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888556 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888580 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-dev\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888587 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888553 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.888732 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.889062 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.893843 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-ceph\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.902365 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-scripts\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.903900 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.904407 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.904642 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-config-data\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:37 crc kubenswrapper[4756]: I0930 21:05:37.907852 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8647\" (UniqueName: \"kubernetes.io/projected/313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a-kube-api-access-g8647\") pod \"cinder-backup-0\" (UID: \"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a\") " pod="openstack/cinder-backup-0" Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.008887 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.232374 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 21:05:38 crc kubenswrapper[4756]: W0930 21:05:38.248792 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10f935ba_d208_4e87_ac0f_e80fbb14c586.slice/crio-d43f09baa2c7181e4ce40dcd936306e5f952cdae74786087b1bc63814718a2aa WatchSource:0}: Error finding container d43f09baa2c7181e4ce40dcd936306e5f952cdae74786087b1bc63814718a2aa: Status 404 returned error can't find the container with id d43f09baa2c7181e4ce40dcd936306e5f952cdae74786087b1bc63814718a2aa Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.252299 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.610947 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.764777 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"10f935ba-d208-4e87-ac0f-e80fbb14c586","Type":"ContainerStarted","Data":"d43f09baa2c7181e4ce40dcd936306e5f952cdae74786087b1bc63814718a2aa"} Sep 30 21:05:38 crc kubenswrapper[4756]: I0930 21:05:38.766313 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a","Type":"ContainerStarted","Data":"88a0b23322f9e9165a194b3cf1ab622f29c401d7a9ed5dad9ab57f4362790834"} Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.478611 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.83:8776/healthcheck\": read tcp 10.217.0.2:52512->10.217.1.83:8776: read: connection reset by peer" Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.784911 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a","Type":"ContainerStarted","Data":"9b34d676bec0fdd85a6bd68097738ded07982299bfc0be2784315ccef7623910"} Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.798161 4756 generic.go:334] "Generic (PLEG): container finished" podID="9c67fc4c-7197-4099-8c81-d383c95e4840" 
containerID="f44488eb42c5657c0b457fda52277ba71832398c0280b4f8cb32ddd5518e9fc2" exitCode=0 Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.798244 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerDied","Data":"f44488eb42c5657c0b457fda52277ba71832398c0280b4f8cb32ddd5518e9fc2"} Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.820981 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"10f935ba-d208-4e87-ac0f-e80fbb14c586","Type":"ContainerStarted","Data":"767f2abd255f50e7aac1c770f6e6a342f8e0b0c5785c2c0cf94d42896d860ee2"} Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.821030 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"10f935ba-d208-4e87-ac0f-e80fbb14c586","Type":"ContainerStarted","Data":"3b123e77e02259c631d49843e412988ebe1e3ae20978220b6beaf8eaaa088c85"} Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.848318 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.984869247 podStartE2EDuration="3.848281566s" podCreationTimestamp="2025-09-30 21:05:36 +0000 UTC" firstStartedPulling="2025-09-30 21:05:38.252129775 +0000 UTC m=+5667.873063252" lastFinishedPulling="2025-09-30 21:05:39.115542094 +0000 UTC m=+5668.736475571" observedRunningTime="2025-09-30 21:05:39.840895653 +0000 UTC m=+5669.461829130" watchObservedRunningTime="2025-09-30 21:05:39.848281566 +0000 UTC m=+5669.469215043" Sep 30 21:05:39 crc kubenswrapper[4756]: I0930 21:05:39.897623 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.023591 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.024025 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.026317 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.026364 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.032976 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.036778 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.036892 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.057221 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.057578 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.057663 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.057833 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.057937 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.058009 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.058066 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs" (OuterVolumeSpecName: "logs") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.058268 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvsr4\" (UniqueName: \"kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4\") pod \"9c67fc4c-7197-4099-8c81-d383c95e4840\" (UID: \"9c67fc4c-7197-4099-8c81-d383c95e4840\") " Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.058641 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.059296 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c67fc4c-7197-4099-8c81-d383c95e4840-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.059410 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c67fc4c-7197-4099-8c81-d383c95e4840-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.063555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.066458 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts" (OuterVolumeSpecName: "scripts") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.067374 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4" (OuterVolumeSpecName: "kube-api-access-kvsr4") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "kube-api-access-kvsr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.092576 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.127059 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data" (OuterVolumeSpecName: "config-data") pod "9c67fc4c-7197-4099-8c81-d383c95e4840" (UID: "9c67fc4c-7197-4099-8c81-d383c95e4840"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.161056 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.162497 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.162517 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.162529 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvsr4\" (UniqueName: \"kubernetes.io/projected/9c67fc4c-7197-4099-8c81-d383c95e4840-kube-api-access-kvsr4\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.162539 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c67fc4c-7197-4099-8c81-d383c95e4840-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.275713 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.832799 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a","Type":"ContainerStarted","Data":"e270ebd997169dec0dc5f51abdf9a8da943d15226c2a759e69d15d725af43237"} Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.838284 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c67fc4c-7197-4099-8c81-d383c95e4840","Type":"ContainerDied","Data":"9fc3de1a0073697df68d29433e8006802184140c57076c6bf06fa0d03a281dfa"} Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.838361 4756 scope.go:117] "RemoveContainer" containerID="f44488eb42c5657c0b457fda52277ba71832398c0280b4f8cb32ddd5518e9fc2" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.838649 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.838678 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.857015 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.858100 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.868648 4756 scope.go:117] "RemoveContainer" containerID="b84101524b97f1eeb619092d021eb68ebea7f888ac37c8bd8a607f9dd5e8cb67" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.883688 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.084921848 podStartE2EDuration="3.883671861s" podCreationTimestamp="2025-09-30 21:05:37 +0000 UTC" firstStartedPulling="2025-09-30 21:05:38.618069369 +0000 UTC m=+5668.239002846" lastFinishedPulling="2025-09-30 21:05:39.416819392 +0000 UTC m=+5669.037752859" observedRunningTime="2025-09-30 21:05:40.875054106 +0000 UTC m=+5670.495987613" watchObservedRunningTime="2025-09-30 21:05:40.883671861 +0000 UTC m=+5670.504605338" Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.972429 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:40 crc kubenswrapper[4756]: I0930 21:05:40.992481 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.021424 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:41 crc kubenswrapper[4756]: E0930 21:05:41.021825 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.021843 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api" Sep 30 21:05:41 crc kubenswrapper[4756]: E0930 21:05:41.021862 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api-log" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.021870 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api-log" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.022027 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api-log" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.022046 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" containerName="cinder-api" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.023025 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.027632 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.056859 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.134260 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c67fc4c-7197-4099-8c81-d383c95e4840" path="/var/lib/kubelet/pods/9c67fc4c-7197-4099-8c81-d383c95e4840/volumes" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203748 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a4a49533-8aea-4c26-a272-8849cd7cab10-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203818 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-scripts\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203850 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203872 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data-custom\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203912 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbhbj\" (UniqueName: \"kubernetes.io/projected/a4a49533-8aea-4c26-a272-8849cd7cab10-kube-api-access-pbhbj\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203943 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4a49533-8aea-4c26-a272-8849cd7cab10-logs\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.203966 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305432 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a4a49533-8aea-4c26-a272-8849cd7cab10-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") 
" pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305712 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-scripts\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305750 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305781 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data-custom\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305821 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbhbj\" (UniqueName: \"kubernetes.io/projected/a4a49533-8aea-4c26-a272-8849cd7cab10-kube-api-access-pbhbj\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305853 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4a49533-8aea-4c26-a272-8849cd7cab10-logs\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.305872 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.306274 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a4a49533-8aea-4c26-a272-8849cd7cab10-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.307448 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4a49533-8aea-4c26-a272-8849cd7cab10-logs\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.311391 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-scripts\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.311578 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data-custom\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc 
kubenswrapper[4756]: I0930 21:05:41.311847 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-config-data\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.323339 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4a49533-8aea-4c26-a272-8849cd7cab10-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.324954 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbhbj\" (UniqueName: \"kubernetes.io/projected/a4a49533-8aea-4c26-a272-8849cd7cab10-kube-api-access-pbhbj\") pod \"cinder-api-0\" (UID: \"a4a49533-8aea-4c26-a272-8849cd7cab10\") " pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.363792 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.539907 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.539994 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.828619 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 21:05:41 crc kubenswrapper[4756]: I0930 21:05:41.859737 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a4a49533-8aea-4c26-a272-8849cd7cab10","Type":"ContainerStarted","Data":"89bc5f6ece5472c888fe33a67ed93aa6f5f855dd4dbabb61c93f078b983e558d"} Sep 30 21:05:42 crc kubenswrapper[4756]: I0930 21:05:42.577120 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:42 crc kubenswrapper[4756]: I0930 21:05:42.880794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a4a49533-8aea-4c26-a272-8849cd7cab10","Type":"ContainerStarted","Data":"f2cfcc8dddcec3e983ef61d5af52780b2d9be979e8e84aceb1050f44f4338fb2"} Sep 30 21:05:43 crc kubenswrapper[4756]: I0930 21:05:43.010512 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Sep 30 21:05:43 crc kubenswrapper[4756]: I0930 21:05:43.894749 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a4a49533-8aea-4c26-a272-8849cd7cab10","Type":"ContainerStarted","Data":"8a5a93e407e906638141867db28057732516d777fb2139c93abd0a6722373253"} Sep 30 21:05:43 crc kubenswrapper[4756]: I0930 21:05:43.895576 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 21:05:43 crc kubenswrapper[4756]: I0930 
21:05:43.922635 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.922616843 podStartE2EDuration="3.922616843s" podCreationTimestamp="2025-09-30 21:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:43.915616081 +0000 UTC m=+5673.536549558" watchObservedRunningTime="2025-09-30 21:05:43.922616843 +0000 UTC m=+5673.543550310" Sep 30 21:05:45 crc kubenswrapper[4756]: I0930 21:05:45.489730 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 21:05:45 crc kubenswrapper[4756]: I0930 21:05:45.549968 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:45 crc kubenswrapper[4756]: I0930 21:05:45.918583 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="cinder-scheduler" containerID="cri-o://2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7" gracePeriod=30 Sep 30 21:05:45 crc kubenswrapper[4756]: I0930 21:05:45.919061 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="probe" containerID="cri-o://f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548" gracePeriod=30 Sep 30 21:05:46 crc kubenswrapper[4756]: I0930 21:05:46.937012 4756 generic.go:334] "Generic (PLEG): container finished" podID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerID="f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548" exitCode=0 Sep 30 21:05:46 crc kubenswrapper[4756]: I0930 21:05:46.937103 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerDied","Data":"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548"} Sep 30 21:05:47 crc kubenswrapper[4756]: I0930 21:05:47.772755 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.245648 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.579757 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.752623 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzfg2\" (UniqueName: \"kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753001 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753048 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753151 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753310 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753378 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id\") pod \"b8456c4c-7259-4e33-aa46-961db6c7b617\" (UID: \"b8456c4c-7259-4e33-aa46-961db6c7b617\") " Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.753887 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.759825 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.760680 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2" (OuterVolumeSpecName: "kube-api-access-dzfg2") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "kube-api-access-dzfg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.762614 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts" (OuterVolumeSpecName: "scripts") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.839040 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.855874 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.855924 4756 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8456c4c-7259-4e33-aa46-961db6c7b617-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.855951 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzfg2\" (UniqueName: \"kubernetes.io/projected/b8456c4c-7259-4e33-aa46-961db6c7b617-kube-api-access-dzfg2\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.855972 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.855992 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.879755 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data" (OuterVolumeSpecName: "config-data") pod "b8456c4c-7259-4e33-aa46-961db6c7b617" (UID: "b8456c4c-7259-4e33-aa46-961db6c7b617"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.958708 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8456c4c-7259-4e33-aa46-961db6c7b617-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.963238 4756 generic.go:334] "Generic (PLEG): container finished" podID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerID="2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7" exitCode=0 Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.963272 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerDied","Data":"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7"} Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.963560 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b8456c4c-7259-4e33-aa46-961db6c7b617","Type":"ContainerDied","Data":"91de67c6b513fe90a5bbc2cd560441dffa3c4c0a8829a5a528e52897563f445c"} Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.963340 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:48 crc kubenswrapper[4756]: I0930 21:05:48.963590 4756 scope.go:117] "RemoveContainer" containerID="f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.012825 4756 scope.go:117] "RemoveContainer" containerID="2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.015933 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.035799 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.055489 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:49 crc kubenswrapper[4756]: E0930 21:05:49.056014 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="cinder-scheduler" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.056039 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="cinder-scheduler" Sep 30 21:05:49 crc kubenswrapper[4756]: E0930 21:05:49.056056 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="probe" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.056064 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="probe" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.056296 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="probe" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.056332 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" containerName="cinder-scheduler" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.057943 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.060286 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.064788 4756 scope.go:117] "RemoveContainer" containerID="f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.077591 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:49 crc kubenswrapper[4756]: E0930 21:05:49.096353 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548\": container with ID starting with f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548 not found: ID does not exist" containerID="f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.096435 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548"} err="failed to get container status \"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548\": rpc error: code = NotFound desc = could not find container \"f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548\": container with ID starting with f6c4bf6a2168a182d2520663495dd681418bd0d60f0eb873e649a31859f7c548 not found: ID does not exist" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.096474 4756 scope.go:117] "RemoveContainer" containerID="2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7" Sep 30 21:05:49 crc kubenswrapper[4756]: E0930 21:05:49.097334 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7\": container with ID starting with 2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7 not found: ID does not exist" containerID="2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.097364 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7"} err="failed to get container status \"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7\": rpc error: code = NotFound desc = could not find container \"2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7\": container with ID starting with 2704e9e315fd70449497ad00f9f04cd18e83832822ccf417d45e26444c254ce7 not found: ID does not exist" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.126270 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8456c4c-7259-4e33-aa46-961db6c7b617" path="/var/lib/kubelet/pods/b8456c4c-7259-4e33-aa46-961db6c7b617/volumes" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161227 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161324 
4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161354 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c75023e-8962-4ef9-8da3-f2a36ac299d3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161437 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs6xt\" (UniqueName: \"kubernetes.io/projected/7c75023e-8962-4ef9-8da3-f2a36ac299d3-kube-api-access-zs6xt\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161475 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.161505 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266586 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266779 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266892 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266909 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266937 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/7c75023e-8962-4ef9-8da3-f2a36ac299d3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.266983 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs6xt\" (UniqueName: \"kubernetes.io/projected/7c75023e-8962-4ef9-8da3-f2a36ac299d3-kube-api-access-zs6xt\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.270356 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c75023e-8962-4ef9-8da3-f2a36ac299d3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.270484 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.270770 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.274477 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.274995 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c75023e-8962-4ef9-8da3-f2a36ac299d3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.286508 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs6xt\" (UniqueName: \"kubernetes.io/projected/7c75023e-8962-4ef9-8da3-f2a36ac299d3-kube-api-access-zs6xt\") pod \"cinder-scheduler-0\" (UID: \"7c75023e-8962-4ef9-8da3-f2a36ac299d3\") " pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.412775 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.682203 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 21:05:49 crc kubenswrapper[4756]: W0930 21:05:49.685380 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c75023e_8962_4ef9_8da3_f2a36ac299d3.slice/crio-355c10132a17b541169afb4ac3ff9b8b9384a34e489c9266c9aa9bd829dcf538 WatchSource:0}: Error finding container 355c10132a17b541169afb4ac3ff9b8b9384a34e489c9266c9aa9bd829dcf538: Status 404 returned error can't find the container with id 355c10132a17b541169afb4ac3ff9b8b9384a34e489c9266c9aa9bd829dcf538 Sep 30 21:05:49 crc kubenswrapper[4756]: I0930 21:05:49.978245 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7c75023e-8962-4ef9-8da3-f2a36ac299d3","Type":"ContainerStarted","Data":"355c10132a17b541169afb4ac3ff9b8b9384a34e489c9266c9aa9bd829dcf538"} Sep 30 21:05:50 crc kubenswrapper[4756]: I0930 21:05:50.996639 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7c75023e-8962-4ef9-8da3-f2a36ac299d3","Type":"ContainerStarted","Data":"1cea5cb6c1051b73e597bedfeb26586d4e62a7a3c67ffbc1c9a596779f3d80ea"} Sep 30 21:05:50 crc kubenswrapper[4756]: I0930 21:05:50.997079 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7c75023e-8962-4ef9-8da3-f2a36ac299d3","Type":"ContainerStarted","Data":"74eeb148e77485aeee596819f503ff855d5c2dc7a3b2fcbb03bfcf90359dd7a5"} Sep 30 21:05:51 crc kubenswrapper[4756]: I0930 21:05:51.024972 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.024949868 podStartE2EDuration="2.024949868s" podCreationTimestamp="2025-09-30 21:05:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:05:51.017587326 +0000 UTC m=+5680.638520893" watchObservedRunningTime="2025-09-30 21:05:51.024949868 +0000 UTC m=+5680.645883355" Sep 30 21:05:53 crc kubenswrapper[4756]: I0930 21:05:53.291802 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 21:05:54 crc kubenswrapper[4756]: I0930 21:05:54.413093 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 21:05:59 crc kubenswrapper[4756]: I0930 21:05:59.627000 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 21:06:11 crc kubenswrapper[4756]: I0930 21:06:11.538580 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:06:11 crc kubenswrapper[4756]: I0930 21:06:11.539152 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:06:11 crc kubenswrapper[4756]: I0930 21:06:11.539215 
4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 21:06:11 crc kubenswrapper[4756]: I0930 21:06:11.540321 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:06:11 crc kubenswrapper[4756]: I0930 21:06:11.540450 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" gracePeriod=600 Sep 30 21:06:11 crc kubenswrapper[4756]: E0930 21:06:11.670610 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:06:12 crc kubenswrapper[4756]: I0930 21:06:12.269478 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" exitCode=0 Sep 30 21:06:12 crc kubenswrapper[4756]: I0930 21:06:12.269576 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"} Sep 30 21:06:12 crc kubenswrapper[4756]: I0930 21:06:12.270098 4756 scope.go:117] "RemoveContainer" containerID="b07c7b0720849ce41d8fe35c4a65c847659c084f93d181b0409c8a9ac837fcae" Sep 30 21:06:12 crc kubenswrapper[4756]: I0930 21:06:12.271331 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:06:12 crc kubenswrapper[4756]: E0930 21:06:12.271866 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:06:23 crc kubenswrapper[4756]: I0930 21:06:23.089858 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-2h268"] Sep 30 21:06:23 crc kubenswrapper[4756]: I0930 21:06:23.101823 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-2h268"] Sep 30 21:06:23 crc kubenswrapper[4756]: I0930 21:06:23.158952 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1de8243f-1534-4d86-a9f8-2f2df9ba6ba7" path="/var/lib/kubelet/pods/1de8243f-1534-4d86-a9f8-2f2df9ba6ba7/volumes" Sep 30 21:06:25 crc kubenswrapper[4756]: I0930 21:06:25.116727 4756 
scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:06:25 crc kubenswrapper[4756]: E0930 21:06:25.117765 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:06:33 crc kubenswrapper[4756]: I0930 21:06:33.066193 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9b8d-account-create-pvdf6"] Sep 30 21:06:33 crc kubenswrapper[4756]: I0930 21:06:33.075761 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9b8d-account-create-pvdf6"] Sep 30 21:06:33 crc kubenswrapper[4756]: I0930 21:06:33.135156 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f46f27e6-1550-4485-b876-caa797674f2c" path="/var/lib/kubelet/pods/f46f27e6-1550-4485-b876-caa797674f2c/volumes" Sep 30 21:06:37 crc kubenswrapper[4756]: I0930 21:06:37.116519 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:06:37 crc kubenswrapper[4756]: E0930 21:06:37.117472 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:06:40 crc kubenswrapper[4756]: I0930 21:06:40.029907 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-gkpxw"] Sep 30 21:06:40 crc kubenswrapper[4756]: I0930 21:06:40.037108 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gkpxw"] Sep 30 21:06:41 crc kubenswrapper[4756]: I0930 21:06:41.126142 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ac64df1-a909-4013-981b-23bfc2cf0e4e" path="/var/lib/kubelet/pods/8ac64df1-a909-4013-981b-23bfc2cf0e4e/volumes" Sep 30 21:06:48 crc kubenswrapper[4756]: I0930 21:06:48.117901 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:06:48 crc kubenswrapper[4756]: E0930 21:06:48.118738 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:06:53 crc kubenswrapper[4756]: I0930 21:06:53.059992 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-4zpvm"] Sep 30 21:06:53 crc kubenswrapper[4756]: I0930 21:06:53.071083 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-4zpvm"] Sep 30 21:06:53 crc kubenswrapper[4756]: I0930 21:06:53.127320 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="894ad8d4-57b4-4822-9ef7-f69bcd1222df" path="/var/lib/kubelet/pods/894ad8d4-57b4-4822-9ef7-f69bcd1222df/volumes" Sep 30 21:07:02 crc kubenswrapper[4756]: I0930 21:07:02.115980 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:07:02 crc kubenswrapper[4756]: E0930 21:07:02.116872 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:07:16 crc kubenswrapper[4756]: I0930 21:07:16.116267 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:07:16 crc kubenswrapper[4756]: E0930 21:07:16.117455 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:07:21 crc kubenswrapper[4756]: I0930 21:07:21.672116 4756 scope.go:117] "RemoveContainer" containerID="1e2397e4928273b5a7b200819279cdc0b3a5136f36fd569c13144d1da47cb6ae" Sep 30 21:07:21 crc kubenswrapper[4756]: I0930 21:07:21.699690 4756 scope.go:117] "RemoveContainer" containerID="8abdcf3a96ef10f873ae33c102f9ccc7fef5646367a55c5d9a830bb0eeacbb87" Sep 30 21:07:21 crc kubenswrapper[4756]: I0930 21:07:21.799464 4756 scope.go:117] "RemoveContainer" containerID="9afa79264565db561d923f42d248b7598ee531b2e203d481d6f80ed0fd35d191" Sep 30 21:07:21 crc kubenswrapper[4756]: I0930 21:07:21.854290 4756 scope.go:117] "RemoveContainer" containerID="f9c953bd4a1be94c439c1e1296bf492ec49b81a59f65827c62a85d2a1dc8b6c8" Sep 30 21:07:27 crc kubenswrapper[4756]: I0930 21:07:27.115853 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:07:27 crc kubenswrapper[4756]: E0930 21:07:27.116590 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:07:40 crc kubenswrapper[4756]: I0930 21:07:40.116998 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:07:40 crc kubenswrapper[4756]: E0930 21:07:40.117930 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:07:41 crc 
kubenswrapper[4756]: I0930 21:07:41.130786 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gltp7"] Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.133408 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.136128 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-pktcs" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.136349 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.154552 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7"] Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.170722 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-2nzxh"] Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.179634 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.207998 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2nzxh"] Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.234186 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-etc-ovs\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.234223 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f82193-72c2-4f43-b9bc-3e3f9b832b05-scripts\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.234244 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-log\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.234357 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-log-ovn\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.234391 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-lib\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235469 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run-ovn\") pod \"ovn-controller-gltp7\" (UID: 
\"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235554 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b6960d9-1e9a-48e5-832a-d740084ceede-scripts\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235596 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzv4k\" (UniqueName: \"kubernetes.io/projected/5b6960d9-1e9a-48e5-832a-d740084ceede-kube-api-access-kzv4k\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235625 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235682 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-run\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.235708 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l5wf\" (UniqueName: \"kubernetes.io/projected/55f82193-72c2-4f43-b9bc-3e3f9b832b05-kube-api-access-9l5wf\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337299 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-log-ovn\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337343 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-lib\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337418 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run-ovn\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337464 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b6960d9-1e9a-48e5-832a-d740084ceede-scripts\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 
21:07:41.337481 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzv4k\" (UniqueName: \"kubernetes.io/projected/5b6960d9-1e9a-48e5-832a-d740084ceede-kube-api-access-kzv4k\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337498 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337522 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-run\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337541 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l5wf\" (UniqueName: \"kubernetes.io/projected/55f82193-72c2-4f43-b9bc-3e3f9b832b05-kube-api-access-9l5wf\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337600 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-etc-ovs\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337620 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f82193-72c2-4f43-b9bc-3e3f9b832b05-scripts\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337636 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-log\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337703 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-log-ovn\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337723 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-log\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337724 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-lib\") pod \"ovn-controller-ovs-2nzxh\" (UID: 
\"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337728 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run-ovn\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337780 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-var-run\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337808 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b6960d9-1e9a-48e5-832a-d740084ceede-var-run\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.337810 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/55f82193-72c2-4f43-b9bc-3e3f9b832b05-etc-ovs\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.340076 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f82193-72c2-4f43-b9bc-3e3f9b832b05-scripts\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.340910 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b6960d9-1e9a-48e5-832a-d740084ceede-scripts\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.355106 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l5wf\" (UniqueName: \"kubernetes.io/projected/55f82193-72c2-4f43-b9bc-3e3f9b832b05-kube-api-access-9l5wf\") pod \"ovn-controller-ovs-2nzxh\" (UID: \"55f82193-72c2-4f43-b9bc-3e3f9b832b05\") " pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.355921 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzv4k\" (UniqueName: \"kubernetes.io/projected/5b6960d9-1e9a-48e5-832a-d740084ceede-kube-api-access-kzv4k\") pod \"ovn-controller-gltp7\" (UID: \"5b6960d9-1e9a-48e5-832a-d740084ceede\") " pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.470769 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.502342 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:41 crc kubenswrapper[4756]: I0930 21:07:41.954648 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7"] Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.289253 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7" event={"ID":"5b6960d9-1e9a-48e5-832a-d740084ceede","Type":"ContainerStarted","Data":"c90242e9acb848e88a6f90eae2ac571aee7ab5ca141ba0032976ee89faf851a3"} Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.406967 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2nzxh"] Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.652100 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-zwrsz"] Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.653833 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.655692 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.669427 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zwrsz"] Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.769520 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-config\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.769636 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovn-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.769725 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovs-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.769763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6bk8\" (UniqueName: \"kubernetes.io/projected/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-kube-api-access-h6bk8\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.871451 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovs-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.871516 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6bk8\" (UniqueName: 
\"kubernetes.io/projected/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-kube-api-access-h6bk8\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.871568 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-config\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.871623 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovn-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.871820 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovn-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.872384 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-config\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.872472 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-ovs-rundir\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.894557 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6bk8\" (UniqueName: \"kubernetes.io/projected/0c0bde9d-e4ed-466c-8ebd-7383eacd6157-kube-api-access-h6bk8\") pod \"ovn-controller-metrics-zwrsz\" (UID: \"0c0bde9d-e4ed-466c-8ebd-7383eacd6157\") " pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:42 crc kubenswrapper[4756]: I0930 21:07:42.980021 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-zwrsz" Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.299838 4756 generic.go:334] "Generic (PLEG): container finished" podID="55f82193-72c2-4f43-b9bc-3e3f9b832b05" containerID="011336fc97bf84c878444fd5cc930bf9f76827618682cbb7143d6ecd0b37b9ba" exitCode=0 Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.300177 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2nzxh" event={"ID":"55f82193-72c2-4f43-b9bc-3e3f9b832b05","Type":"ContainerDied","Data":"011336fc97bf84c878444fd5cc930bf9f76827618682cbb7143d6ecd0b37b9ba"} Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.300208 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2nzxh" event={"ID":"55f82193-72c2-4f43-b9bc-3e3f9b832b05","Type":"ContainerStarted","Data":"06b11c5b7ada5665f30cb8a0588ce2df9024d317da6eafdcd85fa9911347a449"} Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.304732 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7" event={"ID":"5b6960d9-1e9a-48e5-832a-d740084ceede","Type":"ContainerStarted","Data":"c0d97dfca5b3970ce0b6aa76bac8f9cac7cec89bfa075d815d09166de1043c72"} Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.304925 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-gltp7" Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.339758 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-gltp7" podStartSLOduration=2.339737436 podStartE2EDuration="2.339737436s" podCreationTimestamp="2025-09-30 21:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:07:43.338242317 +0000 UTC m=+5792.959175794" watchObservedRunningTime="2025-09-30 21:07:43.339737436 +0000 UTC m=+5792.960670923" Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.445299 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zwrsz"] Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.969825 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-l88p8"] Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.971303 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:43 crc kubenswrapper[4756]: I0930 21:07:43.984849 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-l88p8"] Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.092783 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25fsz\" (UniqueName: \"kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz\") pod \"octavia-db-create-l88p8\" (UID: \"477c082d-c5b0-4c5d-8907-1f12804b3b54\") " pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.195533 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25fsz\" (UniqueName: \"kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz\") pod \"octavia-db-create-l88p8\" (UID: \"477c082d-c5b0-4c5d-8907-1f12804b3b54\") " pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.221369 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25fsz\" (UniqueName: \"kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz\") pod \"octavia-db-create-l88p8\" (UID: \"477c082d-c5b0-4c5d-8907-1f12804b3b54\") " pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.286881 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.319577 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2nzxh" event={"ID":"55f82193-72c2-4f43-b9bc-3e3f9b832b05","Type":"ContainerStarted","Data":"82b68d2c4fc19acd96b8db28ecd4379620b00701a368691a808cfd7f87d19f00"} Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.319625 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2nzxh" event={"ID":"55f82193-72c2-4f43-b9bc-3e3f9b832b05","Type":"ContainerStarted","Data":"6ce33353366c2b7d308c653159fcf3c1a4d6a30b86acc9a4c9de27978e91ad75"} Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.319642 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.319666 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.323924 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zwrsz" event={"ID":"0c0bde9d-e4ed-466c-8ebd-7383eacd6157","Type":"ContainerStarted","Data":"3505e23464f5eaccf8c339fce52d15f7052dbeb4eb32e029d417d8d13f9b1517"} Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.323970 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zwrsz" event={"ID":"0c0bde9d-e4ed-466c-8ebd-7383eacd6157","Type":"ContainerStarted","Data":"bf7e16ebbbfd5fcdc4cd9c6c483875492406a1cc693ccbdd490d67d67da6f40f"} Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.348749 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-2nzxh" podStartSLOduration=3.348730793 podStartE2EDuration="3.348730793s" podCreationTimestamp="2025-09-30 21:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:07:44.348440605 +0000 UTC m=+5793.969374082" watchObservedRunningTime="2025-09-30 21:07:44.348730793 +0000 UTC m=+5793.969664270" Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.373516 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-zwrsz" podStartSLOduration=2.3734991389999998 podStartE2EDuration="2.373499139s" podCreationTimestamp="2025-09-30 21:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:07:44.371697011 +0000 UTC m=+5793.992630488" watchObservedRunningTime="2025-09-30 21:07:44.373499139 +0000 UTC m=+5793.994432696" Sep 30 21:07:44 crc kubenswrapper[4756]: W0930 21:07:44.820818 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod477c082d_c5b0_4c5d_8907_1f12804b3b54.slice/crio-04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e WatchSource:0}: Error finding container 04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e: Status 404 returned error can't find the container with id 04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e Sep 30 21:07:44 crc kubenswrapper[4756]: I0930 21:07:44.820851 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-l88p8"] Sep 30 21:07:45 crc kubenswrapper[4756]: E0930 21:07:45.194741 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod477c082d_c5b0_4c5d_8907_1f12804b3b54.slice/crio-d75a3de1b7613999c5993dcc38d928473addf046a976593a0050014f98ad61e8.scope\": RecentStats: unable to find data in memory cache]" Sep 30 21:07:45 crc kubenswrapper[4756]: I0930 21:07:45.338657 4756 generic.go:334] "Generic (PLEG): container finished" podID="477c082d-c5b0-4c5d-8907-1f12804b3b54" containerID="d75a3de1b7613999c5993dcc38d928473addf046a976593a0050014f98ad61e8" exitCode=0 Sep 30 21:07:45 crc kubenswrapper[4756]: I0930 21:07:45.338753 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l88p8" event={"ID":"477c082d-c5b0-4c5d-8907-1f12804b3b54","Type":"ContainerDied","Data":"d75a3de1b7613999c5993dcc38d928473addf046a976593a0050014f98ad61e8"} Sep 30 21:07:45 crc kubenswrapper[4756]: I0930 21:07:45.338796 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l88p8" event={"ID":"477c082d-c5b0-4c5d-8907-1f12804b3b54","Type":"ContainerStarted","Data":"04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e"} Sep 30 21:07:46 crc kubenswrapper[4756]: I0930 21:07:46.728450 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:46 crc kubenswrapper[4756]: I0930 21:07:46.847374 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25fsz\" (UniqueName: \"kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz\") pod \"477c082d-c5b0-4c5d-8907-1f12804b3b54\" (UID: \"477c082d-c5b0-4c5d-8907-1f12804b3b54\") " Sep 30 21:07:46 crc kubenswrapper[4756]: I0930 21:07:46.858316 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz" (OuterVolumeSpecName: "kube-api-access-25fsz") pod "477c082d-c5b0-4c5d-8907-1f12804b3b54" (UID: "477c082d-c5b0-4c5d-8907-1f12804b3b54"). InnerVolumeSpecName "kube-api-access-25fsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:07:46 crc kubenswrapper[4756]: I0930 21:07:46.950860 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25fsz\" (UniqueName: \"kubernetes.io/projected/477c082d-c5b0-4c5d-8907-1f12804b3b54-kube-api-access-25fsz\") on node \"crc\" DevicePath \"\"" Sep 30 21:07:47 crc kubenswrapper[4756]: I0930 21:07:47.360668 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l88p8" event={"ID":"477c082d-c5b0-4c5d-8907-1f12804b3b54","Type":"ContainerDied","Data":"04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e"} Sep 30 21:07:47 crc kubenswrapper[4756]: I0930 21:07:47.360721 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04996b5389d3d9939253496879a582a1cda258c163425207c0e6ff9a1690903e" Sep 30 21:07:47 crc kubenswrapper[4756]: I0930 21:07:47.360802 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-l88p8" Sep 30 21:07:55 crc kubenswrapper[4756]: I0930 21:07:55.116777 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:07:55 crc kubenswrapper[4756]: E0930 21:07:55.117987 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.080615 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-b13e-account-create-2j9bs"] Sep 30 21:07:56 crc kubenswrapper[4756]: E0930 21:07:56.081020 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477c082d-c5b0-4c5d-8907-1f12804b3b54" containerName="mariadb-database-create" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.081037 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="477c082d-c5b0-4c5d-8907-1f12804b3b54" containerName="mariadb-database-create" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.081245 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="477c082d-c5b0-4c5d-8907-1f12804b3b54" containerName="mariadb-database-create" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.081885 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.085117 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.095272 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-b13e-account-create-2j9bs"] Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.137833 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkk5q\" (UniqueName: \"kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q\") pod \"octavia-b13e-account-create-2j9bs\" (UID: \"818e347b-2733-4610-8188-39798cbdd1db\") " pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.239102 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkk5q\" (UniqueName: \"kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q\") pod \"octavia-b13e-account-create-2j9bs\" (UID: \"818e347b-2733-4610-8188-39798cbdd1db\") " pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.265068 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkk5q\" (UniqueName: \"kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q\") pod \"octavia-b13e-account-create-2j9bs\" (UID: \"818e347b-2733-4610-8188-39798cbdd1db\") " pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.412035 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:56 crc kubenswrapper[4756]: I0930 21:07:56.881580 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-b13e-account-create-2j9bs"] Sep 30 21:07:57 crc kubenswrapper[4756]: I0930 21:07:57.466430 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-b13e-account-create-2j9bs" event={"ID":"818e347b-2733-4610-8188-39798cbdd1db","Type":"ContainerStarted","Data":"06b1debc7f1165ddbd7f96ed825eb8e0a062ecf8bc081ec139520e8216cb25e5"} Sep 30 21:07:57 crc kubenswrapper[4756]: I0930 21:07:57.467647 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-b13e-account-create-2j9bs" event={"ID":"818e347b-2733-4610-8188-39798cbdd1db","Type":"ContainerStarted","Data":"fb14a4c0a8939fd4c9f7413dcacbe62c3d54d28ec42f8d57ef275c1ce9542220"} Sep 30 21:07:57 crc kubenswrapper[4756]: I0930 21:07:57.484643 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-b13e-account-create-2j9bs" podStartSLOduration=1.484625914 podStartE2EDuration="1.484625914s" podCreationTimestamp="2025-09-30 21:07:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:07:57.478913035 +0000 UTC m=+5807.099846512" watchObservedRunningTime="2025-09-30 21:07:57.484625914 +0000 UTC m=+5807.105559391" Sep 30 21:07:58 crc kubenswrapper[4756]: I0930 21:07:58.478219 4756 generic.go:334] "Generic (PLEG): container finished" podID="818e347b-2733-4610-8188-39798cbdd1db" containerID="06b1debc7f1165ddbd7f96ed825eb8e0a062ecf8bc081ec139520e8216cb25e5" exitCode=0 Sep 30 21:07:58 crc kubenswrapper[4756]: I0930 21:07:58.478261 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-b13e-account-create-2j9bs" event={"ID":"818e347b-2733-4610-8188-39798cbdd1db","Type":"ContainerDied","Data":"06b1debc7f1165ddbd7f96ed825eb8e0a062ecf8bc081ec139520e8216cb25e5"} Sep 30 21:07:59 crc kubenswrapper[4756]: I0930 21:07:59.846989 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:07:59 crc kubenswrapper[4756]: I0930 21:07:59.920079 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkk5q\" (UniqueName: \"kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q\") pod \"818e347b-2733-4610-8188-39798cbdd1db\" (UID: \"818e347b-2733-4610-8188-39798cbdd1db\") " Sep 30 21:07:59 crc kubenswrapper[4756]: I0930 21:07:59.925879 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q" (OuterVolumeSpecName: "kube-api-access-bkk5q") pod "818e347b-2733-4610-8188-39798cbdd1db" (UID: "818e347b-2733-4610-8188-39798cbdd1db"). InnerVolumeSpecName "kube-api-access-bkk5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:00 crc kubenswrapper[4756]: I0930 21:08:00.022996 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkk5q\" (UniqueName: \"kubernetes.io/projected/818e347b-2733-4610-8188-39798cbdd1db-kube-api-access-bkk5q\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:00 crc kubenswrapper[4756]: I0930 21:08:00.495776 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-b13e-account-create-2j9bs" event={"ID":"818e347b-2733-4610-8188-39798cbdd1db","Type":"ContainerDied","Data":"fb14a4c0a8939fd4c9f7413dcacbe62c3d54d28ec42f8d57ef275c1ce9542220"} Sep 30 21:08:00 crc kubenswrapper[4756]: I0930 21:08:00.495810 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb14a4c0a8939fd4c9f7413dcacbe62c3d54d28ec42f8d57ef275c1ce9542220" Sep 30 21:08:00 crc kubenswrapper[4756]: I0930 21:08:00.495826 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-b13e-account-create-2j9bs" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.074172 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-6jtcb"] Sep 30 21:08:02 crc kubenswrapper[4756]: E0930 21:08:02.074890 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="818e347b-2733-4610-8188-39798cbdd1db" containerName="mariadb-account-create" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.074901 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="818e347b-2733-4610-8188-39798cbdd1db" containerName="mariadb-account-create" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.075139 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="818e347b-2733-4610-8188-39798cbdd1db" containerName="mariadb-account-create" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.075794 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.082752 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-6jtcb"] Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.170804 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp6b4\" (UniqueName: \"kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4\") pod \"octavia-persistence-db-create-6jtcb\" (UID: \"57f905fc-a890-404f-be63-02046a7e6774\") " pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.273242 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp6b4\" (UniqueName: \"kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4\") pod \"octavia-persistence-db-create-6jtcb\" (UID: \"57f905fc-a890-404f-be63-02046a7e6774\") " pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.299368 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp6b4\" (UniqueName: \"kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4\") pod \"octavia-persistence-db-create-6jtcb\" (UID: \"57f905fc-a890-404f-be63-02046a7e6774\") " pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.408775 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:02 crc kubenswrapper[4756]: W0930 21:08:02.883425 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57f905fc_a890_404f_be63_02046a7e6774.slice/crio-7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5 WatchSource:0}: Error finding container 7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5: Status 404 returned error can't find the container with id 7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5 Sep 30 21:08:02 crc kubenswrapper[4756]: I0930 21:08:02.895662 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-6jtcb"] Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.532808 4756 generic.go:334] "Generic (PLEG): container finished" podID="57f905fc-a890-404f-be63-02046a7e6774" containerID="c82c3c0160979cdad882fbfa9caf6e527c2515ea5483b6563790cbdf85cab886" exitCode=0 Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.532951 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-6jtcb" event={"ID":"57f905fc-a890-404f-be63-02046a7e6774","Type":"ContainerDied","Data":"c82c3c0160979cdad882fbfa9caf6e527c2515ea5483b6563790cbdf85cab886"} Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.534316 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-6jtcb" event={"ID":"57f905fc-a890-404f-be63-02046a7e6774","Type":"ContainerStarted","Data":"7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5"} Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.594205 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"] Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.597216 4756 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.625255 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"] Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.700075 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78scg\" (UniqueName: \"kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.700144 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.700169 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.802270 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78scg\" (UniqueName: \"kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.802318 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.802334 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.802993 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.803039 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.831081 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78scg\" (UniqueName: \"kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg\") pod \"redhat-marketplace-qrwhg\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") " pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:03 crc kubenswrapper[4756]: I0930 21:08:03.932080 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.386995 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"] Sep 30 21:08:04 crc kubenswrapper[4756]: W0930 21:08:04.394679 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5eef5de_a05a_4118_bb76_a08b881d3cf0.slice/crio-371feede64f0170da3f44e81fc6195fee58447490110366b956f2fcbdc0a3168 WatchSource:0}: Error finding container 371feede64f0170da3f44e81fc6195fee58447490110366b956f2fcbdc0a3168: Status 404 returned error can't find the container with id 371feede64f0170da3f44e81fc6195fee58447490110366b956f2fcbdc0a3168 Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.547758 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerStarted","Data":"371feede64f0170da3f44e81fc6195fee58447490110366b956f2fcbdc0a3168"} Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.790579 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.820246 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp6b4\" (UniqueName: \"kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4\") pod \"57f905fc-a890-404f-be63-02046a7e6774\" (UID: \"57f905fc-a890-404f-be63-02046a7e6774\") " Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.826343 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4" (OuterVolumeSpecName: "kube-api-access-wp6b4") pod "57f905fc-a890-404f-be63-02046a7e6774" (UID: "57f905fc-a890-404f-be63-02046a7e6774"). InnerVolumeSpecName "kube-api-access-wp6b4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:04 crc kubenswrapper[4756]: I0930 21:08:04.922633 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp6b4\" (UniqueName: \"kubernetes.io/projected/57f905fc-a890-404f-be63-02046a7e6774-kube-api-access-wp6b4\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:05 crc kubenswrapper[4756]: I0930 21:08:05.558099 4756 generic.go:334] "Generic (PLEG): container finished" podID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerID="cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704" exitCode=0 Sep 30 21:08:05 crc kubenswrapper[4756]: I0930 21:08:05.558340 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerDied","Data":"cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704"} Sep 30 21:08:05 crc kubenswrapper[4756]: I0930 21:08:05.565618 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-6jtcb" event={"ID":"57f905fc-a890-404f-be63-02046a7e6774","Type":"ContainerDied","Data":"7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5"} Sep 30 21:08:05 crc kubenswrapper[4756]: I0930 21:08:05.565657 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b3c717ee7866f989e54ab4c65cb91e30852ab634c8646c46416659e2326b4d5" Sep 30 21:08:05 crc kubenswrapper[4756]: I0930 21:08:05.565841 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-6jtcb" Sep 30 21:08:08 crc kubenswrapper[4756]: I0930 21:08:08.595210 4756 generic.go:334] "Generic (PLEG): container finished" podID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerID="b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136" exitCode=0 Sep 30 21:08:08 crc kubenswrapper[4756]: I0930 21:08:08.595260 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerDied","Data":"b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136"} Sep 30 21:08:10 crc kubenswrapper[4756]: I0930 21:08:10.116212 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:08:10 crc kubenswrapper[4756]: E0930 21:08:10.116879 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:08:10 crc kubenswrapper[4756]: I0930 21:08:10.634955 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerStarted","Data":"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"} Sep 30 21:08:10 crc kubenswrapper[4756]: I0930 21:08:10.667460 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qrwhg" podStartSLOduration=3.028030464 podStartE2EDuration="7.6674378s" podCreationTimestamp="2025-09-30 21:08:03 +0000 UTC" 
firstStartedPulling="2025-09-30 21:08:05.562607115 +0000 UTC m=+5815.183540592" lastFinishedPulling="2025-09-30 21:08:10.202014451 +0000 UTC m=+5819.822947928" observedRunningTime="2025-09-30 21:08:10.655843628 +0000 UTC m=+5820.276777115" watchObservedRunningTime="2025-09-30 21:08:10.6674378 +0000 UTC m=+5820.288371277" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.011681 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-d793-account-create-g2xr5"] Sep 30 21:08:13 crc kubenswrapper[4756]: E0930 21:08:13.014132 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f905fc-a890-404f-be63-02046a7e6774" containerName="mariadb-database-create" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.014215 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f905fc-a890-404f-be63-02046a7e6774" containerName="mariadb-database-create" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.014482 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f905fc-a890-404f-be63-02046a7e6774" containerName="mariadb-database-create" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.015294 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.017523 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.028860 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-d793-account-create-g2xr5"] Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.085100 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5pq2\" (UniqueName: \"kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2\") pod \"octavia-d793-account-create-g2xr5\" (UID: \"95434820-0d72-4254-bf80-df546cb03774\") " pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.187165 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5pq2\" (UniqueName: \"kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2\") pod \"octavia-d793-account-create-g2xr5\" (UID: \"95434820-0d72-4254-bf80-df546cb03774\") " pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.224381 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5pq2\" (UniqueName: \"kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2\") pod \"octavia-d793-account-create-g2xr5\" (UID: \"95434820-0d72-4254-bf80-df546cb03774\") " pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.338669 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.812658 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-d793-account-create-g2xr5"] Sep 30 21:08:13 crc kubenswrapper[4756]: W0930 21:08:13.821310 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95434820_0d72_4254_bf80_df546cb03774.slice/crio-e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8 WatchSource:0}: Error finding container e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8: Status 404 returned error can't find the container with id e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8 Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.932791 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.932872 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:13 crc kubenswrapper[4756]: I0930 21:08:13.995422 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:14 crc kubenswrapper[4756]: I0930 21:08:14.676517 4756 generic.go:334] "Generic (PLEG): container finished" podID="95434820-0d72-4254-bf80-df546cb03774" containerID="6ffb02fc015aa1aa321236eb89debcae475a29dbe3d8b2ea6d90673c22761c8c" exitCode=0 Sep 30 21:08:14 crc kubenswrapper[4756]: I0930 21:08:14.676562 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-d793-account-create-g2xr5" event={"ID":"95434820-0d72-4254-bf80-df546cb03774","Type":"ContainerDied","Data":"6ffb02fc015aa1aa321236eb89debcae475a29dbe3d8b2ea6d90673c22761c8c"} Sep 30 21:08:14 crc kubenswrapper[4756]: I0930 21:08:14.676628 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-d793-account-create-g2xr5" event={"ID":"95434820-0d72-4254-bf80-df546cb03774","Type":"ContainerStarted","Data":"e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8"} Sep 30 21:08:15 crc kubenswrapper[4756]: I0930 21:08:15.749935 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qrwhg" Sep 30 21:08:15 crc kubenswrapper[4756]: I0930 21:08:15.810487 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"] Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.040619 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.237713 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5pq2\" (UniqueName: \"kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2\") pod \"95434820-0d72-4254-bf80-df546cb03774\" (UID: \"95434820-0d72-4254-bf80-df546cb03774\") " Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.246572 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2" (OuterVolumeSpecName: "kube-api-access-f5pq2") pod "95434820-0d72-4254-bf80-df546cb03774" (UID: "95434820-0d72-4254-bf80-df546cb03774"). 
InnerVolumeSpecName "kube-api-access-f5pq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.341928 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5pq2\" (UniqueName: \"kubernetes.io/projected/95434820-0d72-4254-bf80-df546cb03774-kube-api-access-f5pq2\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.516032 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gltp7" podUID="5b6960d9-1e9a-48e5-832a-d740084ceede" containerName="ovn-controller" probeResult="failure" output=< Sep 30 21:08:16 crc kubenswrapper[4756]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 21:08:16 crc kubenswrapper[4756]: > Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.550457 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.556390 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2nzxh" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.700892 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-d793-account-create-g2xr5" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.702836 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-d793-account-create-g2xr5" event={"ID":"95434820-0d72-4254-bf80-df546cb03774","Type":"ContainerDied","Data":"e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8"} Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.702895 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1b91cade174f7d4d9708c352fac9bba77fd6ac1a7017d9c94e687591fdcefc8" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.702917 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gltp7-config-l8wwd"] Sep 30 21:08:16 crc kubenswrapper[4756]: E0930 21:08:16.703422 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95434820-0d72-4254-bf80-df546cb03774" containerName="mariadb-account-create" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.703442 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="95434820-0d72-4254-bf80-df546cb03774" containerName="mariadb-account-create" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.703669 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="95434820-0d72-4254-bf80-df546cb03774" containerName="mariadb-account-create" Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.704352 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.706286 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.715133 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7-config-l8wwd"]
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.851967 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.852046 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.852101 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcgvg\" (UniqueName: \"kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.852938 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.852982 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.853085 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954262 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954606 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954670 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954678 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954697 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954878 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954887 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.954994 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.955011 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcgvg\" (UniqueName: \"kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.955513 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.956799 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:16 crc kubenswrapper[4756]: I0930 21:08:16.973708 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcgvg\" (UniqueName: \"kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg\") pod \"ovn-controller-gltp7-config-l8wwd\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") " pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:17 crc kubenswrapper[4756]: I0930 21:08:17.037945 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:17 crc kubenswrapper[4756]: I0930 21:08:17.475812 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7-config-l8wwd"]
Sep 30 21:08:17 crc kubenswrapper[4756]: I0930 21:08:17.711187 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-l8wwd" event={"ID":"8e4a48af-90ee-43ed-a22a-f05a9765fa67","Type":"ContainerStarted","Data":"d497fcd8f2e4b3db12a6af204351a1312ef4892471fb3ce34fea092b0fd80315"}
Sep 30 21:08:17 crc kubenswrapper[4756]: I0930 21:08:17.711510 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qrwhg" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="registry-server" containerID="cri-o://fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace" gracePeriod=2
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.716800 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrwhg"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.722301 4756 generic.go:334] "Generic (PLEG): container finished" podID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerID="fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace" exitCode=0
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.722371 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerDied","Data":"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"}
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.722447 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrwhg" event={"ID":"b5eef5de-a05a-4118-bb76-a08b881d3cf0","Type":"ContainerDied","Data":"371feede64f0170da3f44e81fc6195fee58447490110366b956f2fcbdc0a3168"}
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.722381 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrwhg"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.722466 4756 scope.go:117] "RemoveContainer" containerID="fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.724234 4756 generic.go:334] "Generic (PLEG): container finished" podID="8e4a48af-90ee-43ed-a22a-f05a9765fa67" containerID="84a4ab483a45f9f5f306b97ffcfd9900f6b1710503c9cff033755cd83774eee1" exitCode=0
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.724272 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-l8wwd" event={"ID":"8e4a48af-90ee-43ed-a22a-f05a9765fa67","Type":"ContainerDied","Data":"84a4ab483a45f9f5f306b97ffcfd9900f6b1710503c9cff033755cd83774eee1"}
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.755521 4756 scope.go:117] "RemoveContainer" containerID="b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.782802 4756 scope.go:117] "RemoveContainer" containerID="cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.823725 4756 scope.go:117] "RemoveContainer" containerID="fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"
Sep 30 21:08:18 crc kubenswrapper[4756]: E0930 21:08:18.826354 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace\": container with ID starting with fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace not found: ID does not exist" containerID="fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.826482 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace"} err="failed to get container status \"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace\": rpc error: code = NotFound desc = could not find container \"fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace\": container with ID starting with fb4a81094f52f355eb43e186fb9c47bcf7fbab738a268b1fe7814357c1bc1ace not found: ID does not exist"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.826538 4756 scope.go:117] "RemoveContainer" containerID="b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136"
Sep 30 21:08:18 crc kubenswrapper[4756]: E0930 21:08:18.826960 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136\": container with ID starting with b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136 not found: ID does not exist" containerID="b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.827005 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136"} err="failed to get container status \"b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136\": rpc error: code = NotFound desc = could not find container \"b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136\": container with ID starting with b77df1c7e0491d7a2a424efafa8855e523e495820a60799d2ac2a66be7af5136 not found: ID does not exist"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.827021 4756 scope.go:117] "RemoveContainer" containerID="cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704"
Sep 30 21:08:18 crc kubenswrapper[4756]: E0930 21:08:18.827649 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704\": container with ID starting with cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704 not found: ID does not exist" containerID="cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.827701 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704"} err="failed to get container status \"cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704\": rpc error: code = NotFound desc = could not find container \"cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704\": container with ID starting with cd05ee27c7ce7d36eb0545378d73263dab074f4a6ad9304869ba25581b1ac704 not found: ID does not exist"
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.888690 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78scg\" (UniqueName: \"kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg\") pod \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") "
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.889017 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content\") pod \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") "
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.889050 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities\") pod \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\" (UID: \"b5eef5de-a05a-4118-bb76-a08b881d3cf0\") "
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.890067 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities" (OuterVolumeSpecName: "utilities") pod "b5eef5de-a05a-4118-bb76-a08b881d3cf0" (UID: "b5eef5de-a05a-4118-bb76-a08b881d3cf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.894433 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg" (OuterVolumeSpecName: "kube-api-access-78scg") pod "b5eef5de-a05a-4118-bb76-a08b881d3cf0" (UID: "b5eef5de-a05a-4118-bb76-a08b881d3cf0"). InnerVolumeSpecName "kube-api-access-78scg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.902245 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5eef5de-a05a-4118-bb76-a08b881d3cf0" (UID: "b5eef5de-a05a-4118-bb76-a08b881d3cf0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.991382 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.991427 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5eef5de-a05a-4118-bb76-a08b881d3cf0-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:18 crc kubenswrapper[4756]: I0930 21:08:18.991437 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78scg\" (UniqueName: \"kubernetes.io/projected/b5eef5de-a05a-4118-bb76-a08b881d3cf0-kube-api-access-78scg\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.052264 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"]
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.066769 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrwhg"]
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.150027 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" path="/var/lib/kubelet/pods/b5eef5de-a05a-4118-bb76-a08b881d3cf0/volumes"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.207449 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-66b5c6cccd-rcf58"]
Sep 30 21:08:19 crc kubenswrapper[4756]: E0930 21:08:19.207929 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="registry-server"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.207952 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="registry-server"
Sep 30 21:08:19 crc kubenswrapper[4756]: E0930 21:08:19.207976 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="extract-utilities"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.207986 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="extract-utilities"
Sep 30 21:08:19 crc kubenswrapper[4756]: E0930 21:08:19.208015 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="extract-content"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.208022 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="extract-content"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.208245 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5eef5de-a05a-4118-bb76-a08b881d3cf0" containerName="registry-server"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.209933 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.212867 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.213930 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-4kvsb"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.219241 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.221226 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-66b5c6cccd-rcf58"]
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.297224 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.297953 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-scripts\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.298068 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-octavia-run\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.298259 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-combined-ca-bundle\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.298418 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data-merged\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.399808 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-scripts\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.399861 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-octavia-run\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.399907 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-combined-ca-bundle\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.399938 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data-merged\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.399993 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.401224 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-octavia-run\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.401374 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data-merged\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.405678 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-combined-ca-bundle\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.405758 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-config-data\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.407824 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d38fa27e-0974-4f1d-922c-43f94ab027df-scripts\") pod \"octavia-api-66b5c6cccd-rcf58\" (UID: \"d38fa27e-0974-4f1d-922c-43f94ab027df\") " pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:19 crc kubenswrapper[4756]: I0930 21:08:19.545685 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.085710 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-66b5c6cccd-rcf58"]
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.186032 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.323890 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324291 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324332 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcgvg\" (UniqueName: \"kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324374 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324422 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324447 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run" (OuterVolumeSpecName: "var-run") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324483 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn\") pod \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\" (UID: \"8e4a48af-90ee-43ed-a22a-f05a9765fa67\") "
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324461 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.324848 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.325087 4756 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-additional-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.325112 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.325123 4756 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-run-ovn\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.325136 4756 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8e4a48af-90ee-43ed-a22a-f05a9765fa67-var-log-ovn\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.325599 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts" (OuterVolumeSpecName: "scripts") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.329877 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg" (OuterVolumeSpecName: "kube-api-access-lcgvg") pod "8e4a48af-90ee-43ed-a22a-f05a9765fa67" (UID: "8e4a48af-90ee-43ed-a22a-f05a9765fa67"). InnerVolumeSpecName "kube-api-access-lcgvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.427272 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e4a48af-90ee-43ed-a22a-f05a9765fa67-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.427306 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcgvg\" (UniqueName: \"kubernetes.io/projected/8e4a48af-90ee-43ed-a22a-f05a9765fa67-kube-api-access-lcgvg\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.750383 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-l8wwd" event={"ID":"8e4a48af-90ee-43ed-a22a-f05a9765fa67","Type":"ContainerDied","Data":"d497fcd8f2e4b3db12a6af204351a1312ef4892471fb3ce34fea092b0fd80315"}
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.750431 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-l8wwd"
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.750455 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d497fcd8f2e4b3db12a6af204351a1312ef4892471fb3ce34fea092b0fd80315"
Sep 30 21:08:20 crc kubenswrapper[4756]: I0930 21:08:20.751463 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-66b5c6cccd-rcf58" event={"ID":"d38fa27e-0974-4f1d-922c-43f94ab027df","Type":"ContainerStarted","Data":"28edca297f969011b14cc6f8aaf1f6cf3b384c72ba812de2d09fccfb776a61f2"}
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.126889 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:08:21 crc kubenswrapper[4756]: E0930 21:08:21.128351 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.271281 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gltp7-config-l8wwd"]
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.280554 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gltp7-config-l8wwd"]
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.392732 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gltp7-config-gvvhq"]
Sep 30 21:08:21 crc kubenswrapper[4756]: E0930 21:08:21.393177 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e4a48af-90ee-43ed-a22a-f05a9765fa67" containerName="ovn-config"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.393194 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e4a48af-90ee-43ed-a22a-f05a9765fa67" containerName="ovn-config"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.393388 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e4a48af-90ee-43ed-a22a-f05a9765fa67" containerName="ovn-config"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.394062 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-gvvhq"
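
The recurring machine-config-daemon entries above show restart throttling: the pod is in CrashLoopBackOff, and the kubelet refuses to start the container again until a back-off that has grown to its 5m0s cap expires. A Go sketch of that capped doubling delay, assuming the commonly documented kubelet defaults (10s initial delay, doubled per restart, capped at five minutes); the function is the editor's illustration, not kubelet's implementation:

package main

import (
	"fmt"
	"time"
)

// backoff returns the wait before the next restart attempt: 10s doubled
// per prior restart, saturating at 5 minutes (the "back-off 5m0s" above).
func backoff(restarts int) time.Duration {
	d := 10 * time.Second
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
	}
}

After five restarts the sketch saturates at 5m, which is why the same "back-off 5m0s" error repeats at 21:08:21, 21:08:32 and 21:08:45 below without the container ever being started in between.
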
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.396193 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.413943 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7-config-gvvhq"]
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.529685 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-gltp7"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548174 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548248 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548275 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548427 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw89n\" (UniqueName: \"kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548460 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.548522 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.650707 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.650870 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.650949 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.650977 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.651152 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw89n\" (UniqueName: \"kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.651221 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.652865 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.652973 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.653355 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.653544 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.653892 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.679072 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw89n\" (UniqueName: \"kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n\") pod \"ovn-controller-gltp7-config-gvvhq\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") " pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:21 crc kubenswrapper[4756]: I0930 21:08:21.723098 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:22 crc kubenswrapper[4756]: W0930 21:08:22.238035 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26cad56a_adf5_4339_9564_e1b0e35f65d5.slice/crio-c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2 WatchSource:0}: Error finding container c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2: Status 404 returned error can't find the container with id c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2
Sep 30 21:08:22 crc kubenswrapper[4756]: I0930 21:08:22.238800 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gltp7-config-gvvhq"]
Sep 30 21:08:22 crc kubenswrapper[4756]: I0930 21:08:22.778842 4756 generic.go:334] "Generic (PLEG): container finished" podID="26cad56a-adf5-4339-9564-e1b0e35f65d5" containerID="8407d7908fdfd3a1ea404f121edecbdb2836503b62c9f27ad191957603fd4fca" exitCode=0
Sep 30 21:08:22 crc kubenswrapper[4756]: I0930 21:08:22.778934 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-gvvhq" event={"ID":"26cad56a-adf5-4339-9564-e1b0e35f65d5","Type":"ContainerDied","Data":"8407d7908fdfd3a1ea404f121edecbdb2836503b62c9f27ad191957603fd4fca"}
Sep 30 21:08:22 crc kubenswrapper[4756]: I0930 21:08:22.779108 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-gvvhq" event={"ID":"26cad56a-adf5-4339-9564-e1b0e35f65d5","Type":"ContainerStarted","Data":"c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2"}
Sep 30 21:08:23 crc kubenswrapper[4756]: I0930 21:08:23.128311 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e4a48af-90ee-43ed-a22a-f05a9765fa67" path="/var/lib/kubelet/pods/8e4a48af-90ee-43ed-a22a-f05a9765fa67/volumes"
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.634959 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-gvvhq"
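
The cadvisor watch warning above (manager.go:1169) races container deletion: the cgroup event arrives after CRI-O has already removed the short-lived container, so the lookup returns 404. The cgroup path in that message encodes both the pod UID (with the UID's dashes replaced by underscores in the systemd slice name) and the CRI-O container ID. A Go sketch of extracting both, with the path layout assumed from that single message:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// slicePat picks the pod UID and container ID out of a systemd cgroup path
// of the form ".../kubepods-besteffort-pod<uid>.slice/crio-<containerID>".
var slicePat = regexp.MustCompile(`pod([0-9a-f_]+)\.slice/crio-([0-9a-f]+)$`)

func main() {
	p := "/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26cad56a_adf5_4339_9564_e1b0e35f65d5.slice/crio-c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2"
	if m := slicePat.FindStringSubmatch(p); m != nil {
		fmt.Println("podUID:", strings.ReplaceAll(m[1], "_", "-"))
		fmt.Println("containerID:", m[2])
	}
}

Run against the warning's path, this yields podUID 26cad56a-adf5-4339-9564-e1b0e35f65d5 and container c8576784..., which match the PLEG events for ovn-controller-gltp7-config-gvvhq immediately after it.
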
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748464 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748707 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748779 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run" (OuterVolumeSpecName: "var-run") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748822 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw89n\" (UniqueName: \"kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748923 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.748986 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.749063 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.749255 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn\") pod \"26cad56a-adf5-4339-9564-e1b0e35f65d5\" (UID: \"26cad56a-adf5-4339-9564-e1b0e35f65d5\") "
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.749338 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.749569 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.749993 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts" (OuterVolumeSpecName: "scripts") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.750023 4756 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-additional-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.750060 4756 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run-ovn\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.750078 4756 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-log-ovn\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.750096 4756 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/26cad56a-adf5-4339-9564-e1b0e35f65d5-var-run\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.756587 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n" (OuterVolumeSpecName: "kube-api-access-xw89n") pod "26cad56a-adf5-4339-9564-e1b0e35f65d5" (UID: "26cad56a-adf5-4339-9564-e1b0e35f65d5"). InnerVolumeSpecName "kube-api-access-xw89n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.851641 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw89n\" (UniqueName: \"kubernetes.io/projected/26cad56a-adf5-4339-9564-e1b0e35f65d5-kube-api-access-xw89n\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.851672 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26cad56a-adf5-4339-9564-e1b0e35f65d5-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.866209 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gltp7-config-gvvhq" event={"ID":"26cad56a-adf5-4339-9564-e1b0e35f65d5","Type":"ContainerDied","Data":"c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2"}
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.866241 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gltp7-config-gvvhq"
Sep 30 21:08:30 crc kubenswrapper[4756]: I0930 21:08:30.866261 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8576784603f2167fe3db677008cf422e6d648acb543403e3632fa38ef1477f2"
Sep 30 21:08:31 crc kubenswrapper[4756]: I0930 21:08:31.721787 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gltp7-config-gvvhq"]
Sep 30 21:08:31 crc kubenswrapper[4756]: I0930 21:08:31.733261 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gltp7-config-gvvhq"]
Sep 30 21:08:31 crc kubenswrapper[4756]: I0930 21:08:31.877980 4756 generic.go:334] "Generic (PLEG): container finished" podID="d38fa27e-0974-4f1d-922c-43f94ab027df" containerID="72858b5e55c2dd767d04a5f44c601e7fb4f8b63fb4dba0b8e0fb419bbe6b5d85" exitCode=0
Sep 30 21:08:31 crc kubenswrapper[4756]: I0930 21:08:31.878106 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-66b5c6cccd-rcf58" event={"ID":"d38fa27e-0974-4f1d-922c-43f94ab027df","Type":"ContainerDied","Data":"72858b5e55c2dd767d04a5f44c601e7fb4f8b63fb4dba0b8e0fb419bbe6b5d85"}
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.116068 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:08:32 crc kubenswrapper[4756]: E0930 21:08:32.116506 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.893204 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-66b5c6cccd-rcf58" event={"ID":"d38fa27e-0974-4f1d-922c-43f94ab027df","Type":"ContainerStarted","Data":"1ef08504b657a282dbb1ef64d1ea55d7ed46d132f6b5af58616f76aace7c6de6"}
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.893762 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-66b5c6cccd-rcf58" event={"ID":"d38fa27e-0974-4f1d-922c-43f94ab027df","Type":"ContainerStarted","Data":"4db60cf6b7ffed5a94514b9e76328f692e0170487fd6348a2c970db657642a5b"}
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.893786 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.893799 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:32 crc kubenswrapper[4756]: I0930 21:08:32.928500 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-66b5c6cccd-rcf58" podStartSLOduration=3.223152028 podStartE2EDuration="13.928473466s" podCreationTimestamp="2025-09-30 21:08:19 +0000 UTC" firstStartedPulling="2025-09-30 21:08:20.104897579 +0000 UTC m=+5829.725831056" lastFinishedPulling="2025-09-30 21:08:30.810219017 +0000 UTC m=+5840.431152494" observedRunningTime="2025-09-30 21:08:32.925305833 +0000 UTC m=+5842.546239350" watchObservedRunningTime="2025-09-30 21:08:32.928473466 +0000 UTC m=+5842.549406933"
Sep 30 21:08:33 crc kubenswrapper[4756]: I0930 21:08:33.128514 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26cad56a-adf5-4339-9564-e1b0e35f65d5" path="/var/lib/kubelet/pods/26cad56a-adf5-4339-9564-e1b0e35f65d5/volumes"
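
The pod_startup_latency_tracker entry above can be checked from its own fields: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (21:08:32.928473466 - 21:08:19 = 13.928473466s), and podStartSLOduration subtracts the image-pull window, lastFinishedPulling - firstStartedPulling = 10.705321438s, giving 3.223152028s. A Go sketch of that arithmetic (the decomposition is inferred from the logged fields, with the monotonic "m=+..." suffixes dropped):

package main

import (
	"fmt"
	"time"
)

// mustParse reads timestamps in the format the entry above logs them.
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-09-30 21:08:19 +0000 UTC")
	firstPull := mustParse("2025-09-30 21:08:20.104897579 +0000 UTC")
	lastPull := mustParse("2025-09-30 21:08:30.810219017 +0000 UTC")
	observed := mustParse("2025-09-30 21:08:32.928473466 +0000 UTC")

	e2e := observed.Sub(created)          // total creation-to-running time
	slo := e2e - lastPull.Sub(firstPull)  // excludes the image-pull window
	fmt.Println("podStartE2EDuration:", e2e) // 13.928473466s
	fmt.Println("podStartSLOduration:", slo) // 3.223152028s
}

Both printed values match the logged fields, confirming the ~10.7s image pull accounts for most of octavia-api's startup time.
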
Sep 30 21:08:45 crc kubenswrapper[4756]: I0930 21:08:45.116318 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:08:45 crc kubenswrapper[4756]: E0930 21:08:45.118248 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:08:53 crc kubenswrapper[4756]: I0930 21:08:53.729892 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:54 crc kubenswrapper[4756]: I0930 21:08:54.045979 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-66b5c6cccd-rcf58"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.247061 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-zjxht"]
Sep 30 21:08:55 crc kubenswrapper[4756]: E0930 21:08:55.247641 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cad56a-adf5-4339-9564-e1b0e35f65d5" containerName="ovn-config"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.247658 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cad56a-adf5-4339-9564-e1b0e35f65d5" containerName="ovn-config"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.247888 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="26cad56a-adf5-4339-9564-e1b0e35f65d5" containerName="ovn-config"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.249223 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.252936 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.253343 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.255903 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.260484 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-zjxht"]
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.376528 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-config-data\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.376750 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8e308d95-d34a-4264-86a0-d4bad47490de-hm-ports\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.376824 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8e308d95-d34a-4264-86a0-d4bad47490de-config-data-merged\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.377018 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-scripts\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.478836 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-scripts\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.479001 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-config-data\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.479083 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8e308d95-d34a-4264-86a0-d4bad47490de-hm-ports\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.479151 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8e308d95-d34a-4264-86a0-d4bad47490de-config-data-merged\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.479735 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8e308d95-d34a-4264-86a0-d4bad47490de-config-data-merged\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.480159 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8e308d95-d34a-4264-86a0-d4bad47490de-hm-ports\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.485935 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-scripts\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.506340 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e308d95-d34a-4264-86a0-d4bad47490de-config-data\") pod \"octavia-rsyslog-zjxht\" (UID: \"8e308d95-d34a-4264-86a0-d4bad47490de\") " pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:55 crc kubenswrapper[4756]: I0930 21:08:55.572948 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.120657 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-zjxht"]
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.140128 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-zjxht" event={"ID":"8e308d95-d34a-4264-86a0-d4bad47490de","Type":"ContainerStarted","Data":"72d7a661da8baf357936e9b72da83e0ae0aa401b506e533b92a12fc3cf1ff0e7"}
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.242636 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"]
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.249516 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.257840 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.307103 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"]
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.397969 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.398114 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.502024 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.502126 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.503511 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.524877 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config\") pod \"octavia-image-upload-59f8cff499-mwhmt\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") " pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:08:56 crc kubenswrapper[4756]: I0930 21:08:56.606189 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.084029 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"] Sep 30 21:08:57 crc kubenswrapper[4756]: W0930 21:08:57.091238 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25c22123_ad6e_4f1d_a4f9_334f0e701cbf.slice/crio-a841d9dfe810beea655017a457358e1983968e4d97103888f78f50556a5d3a1f WatchSource:0}: Error finding container a841d9dfe810beea655017a457358e1983968e4d97103888f78f50556a5d3a1f: Status 404 returned error can't find the container with id a841d9dfe810beea655017a457358e1983968e4d97103888f78f50556a5d3a1f Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.116277 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:08:57 crc kubenswrapper[4756]: E0930 21:08:57.116579 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.160263 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerStarted","Data":"a841d9dfe810beea655017a457358e1983968e4d97103888f78f50556a5d3a1f"} Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.417036 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-2m9fn"] Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.419551 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.422293 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.428327 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-2m9fn"] Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.524143 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.524495 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.524624 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.524896 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.626216 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.626342 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.626411 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.626487 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.627167 4756 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.632802 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.635186 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.649641 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data\") pod \"octavia-db-sync-2m9fn\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:57 crc kubenswrapper[4756]: I0930 21:08:57.738636 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:08:58 crc kubenswrapper[4756]: I0930 21:08:58.444906 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-2m9fn"] Sep 30 21:08:58 crc kubenswrapper[4756]: W0930 21:08:58.454145 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf3ed40b_5636_4917_82bc_b9c60bff311f.slice/crio-50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801 WatchSource:0}: Error finding container 50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801: Status 404 returned error can't find the container with id 50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801 Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:08:59.188919 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerStarted","Data":"50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:01.212443 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerStarted","Data":"6d3776eac851b6ba5a88a3ddd9930a8e3c663b99917a9447bac10fdbd018b63c"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:01.214570 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-zjxht" event={"ID":"8e308d95-d34a-4264-86a0-d4bad47490de","Type":"ContainerStarted","Data":"24162b02ef85e24975e76aed0e40ff2ac7b56e1387135322e72738be8d189b7b"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:04.240935 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerID="6d3776eac851b6ba5a88a3ddd9930a8e3c663b99917a9447bac10fdbd018b63c" exitCode=0 Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:04.241159 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" 
event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerDied","Data":"6d3776eac851b6ba5a88a3ddd9930a8e3c663b99917a9447bac10fdbd018b63c"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.582649 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-h4j74"] Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.585963 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.589156 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.589156 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.590618 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.593839 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-h4j74"] Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712310 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-amphora-certs\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712439 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1be3323c-dbb7-42fe-a95d-56b44b70fc98-hm-ports\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712488 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-scripts\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712562 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data-merged\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712588 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.712716 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-combined-ca-bundle\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " 
pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.813973 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-amphora-certs\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.814013 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1be3323c-dbb7-42fe-a95d-56b44b70fc98-hm-ports\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.814045 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-scripts\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.814082 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data-merged\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.814104 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.814150 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-combined-ca-bundle\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.815949 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data-merged\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.816512 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1be3323c-dbb7-42fe-a95d-56b44b70fc98-hm-ports\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.819556 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-config-data\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.819763 4756 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-amphora-certs\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.820092 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-scripts\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.820169 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be3323c-dbb7-42fe-a95d-56b44b70fc98-combined-ca-bundle\") pod \"octavia-healthmanager-h4j74\" (UID: \"1be3323c-dbb7-42fe-a95d-56b44b70fc98\") " pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:05.912555 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-h4j74" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:06.266102 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerStarted","Data":"86facede1871635eb01714c5f8eff8ee47c34beb142ccd161bd2d3856da9420a"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:06.268335 4756 generic.go:334] "Generic (PLEG): container finished" podID="8e308d95-d34a-4264-86a0-d4bad47490de" containerID="24162b02ef85e24975e76aed0e40ff2ac7b56e1387135322e72738be8d189b7b" exitCode=0 Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:06.268431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-zjxht" event={"ID":"8e308d95-d34a-4264-86a0-d4bad47490de","Type":"ContainerDied","Data":"24162b02ef85e24975e76aed0e40ff2ac7b56e1387135322e72738be8d189b7b"} Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:06.294608 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-2m9fn" podStartSLOduration=9.294587387 podStartE2EDuration="9.294587387s" podCreationTimestamp="2025-09-30 21:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:09:06.287351358 +0000 UTC m=+5875.908284835" watchObservedRunningTime="2025-09-30 21:09:06.294587387 +0000 UTC m=+5875.915520884" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.337534 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-qzvbg"] Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.339953 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.343040 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.343065 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.363685 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-qzvbg"] Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.398792 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-hm-ports\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.399168 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-amphora-certs\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.399212 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data-merged\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.399234 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-scripts\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.399254 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.399278 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-combined-ca-bundle\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511448 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-amphora-certs\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511629 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: 
\"kubernetes.io/empty-dir/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data-merged\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511676 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-scripts\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511720 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511773 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-combined-ca-bundle\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.511828 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-hm-ports\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.512635 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-amphora-certs\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.513000 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data-merged\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.513081 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-hm-ports\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.520363 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-config-data\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.521218 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-scripts\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " 
pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.538938 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55-combined-ca-bundle\") pod \"octavia-housekeeping-qzvbg\" (UID: \"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55\") " pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.658970 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-h4j74"] Sep 30 21:09:09 crc kubenswrapper[4756]: I0930 21:09:09.661158 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-qzvbg" Sep 30 21:09:10 crc kubenswrapper[4756]: I0930 21:09:10.116588 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:09:10 crc kubenswrapper[4756]: E0930 21:09:10.116842 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:09:14 crc kubenswrapper[4756]: W0930 21:09:14.816241 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1be3323c_dbb7_42fe_a95d_56b44b70fc98.slice/crio-d6541d2bcc723196500c675e42277da642ec79453200b78f225efcc10aa63e84 WatchSource:0}: Error finding container d6541d2bcc723196500c675e42277da642ec79453200b78f225efcc10aa63e84: Status 404 returned error can't find the container with id d6541d2bcc723196500c675e42277da642ec79453200b78f225efcc10aa63e84 Sep 30 21:09:15 crc kubenswrapper[4756]: I0930 21:09:15.373465 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-qzvbg"] Sep 30 21:09:15 crc kubenswrapper[4756]: I0930 21:09:15.383217 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-h4j74" event={"ID":"1be3323c-dbb7-42fe-a95d-56b44b70fc98","Type":"ContainerStarted","Data":"89ace16bb0ff3f82b5e8fdd26fe554ad3b857d183671446d99eeaa8dfabe1e48"} Sep 30 21:09:15 crc kubenswrapper[4756]: I0930 21:09:15.383281 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-h4j74" event={"ID":"1be3323c-dbb7-42fe-a95d-56b44b70fc98","Type":"ContainerStarted","Data":"d6541d2bcc723196500c675e42277da642ec79453200b78f225efcc10aa63e84"} Sep 30 21:09:16 crc kubenswrapper[4756]: I0930 21:09:16.396738 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qzvbg" event={"ID":"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55","Type":"ContainerStarted","Data":"c8953cc77a9e42b85e1d601d91854b730cd1108091d37006fecfb055c3085d2d"} Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.051940 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-mvn7x"] Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.065062 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-mvn7x"] Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.130811 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2" path="/var/lib/kubelet/pods/a4a4feac-42b9-4ba1-98ae-4cd5bc24c6d2/volumes" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.413338 4756 generic.go:334] "Generic (PLEG): container finished" podID="1be3323c-dbb7-42fe-a95d-56b44b70fc98" containerID="89ace16bb0ff3f82b5e8fdd26fe554ad3b857d183671446d99eeaa8dfabe1e48" exitCode=0 Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.413382 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-h4j74" event={"ID":"1be3323c-dbb7-42fe-a95d-56b44b70fc98","Type":"ContainerDied","Data":"89ace16bb0ff3f82b5e8fdd26fe554ad3b857d183671446d99eeaa8dfabe1e48"} Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.906685 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-wnndt"] Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.908545 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.910568 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.910703 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.920127 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-wnndt"] Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.984428 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data-merged\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.984544 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-hm-ports\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.984790 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.984815 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-amphora-certs\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.984913 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-scripts\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:17 crc kubenswrapper[4756]: I0930 21:09:17.985029 4756 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-combined-ca-bundle\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086574 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-combined-ca-bundle\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086665 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data-merged\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086715 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-hm-ports\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086818 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086842 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-amphora-certs\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.086897 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-scripts\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.090687 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-hm-ports\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.091538 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data-merged\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.093102 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-combined-ca-bundle\") pod \"octavia-worker-wnndt\" (UID: 
\"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.093142 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-config-data\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.094075 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-amphora-certs\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.094944 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9-scripts\") pod \"octavia-worker-wnndt\" (UID: \"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9\") " pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.235240 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-wnndt" Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.427847 4756 generic.go:334] "Generic (PLEG): container finished" podID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerID="86facede1871635eb01714c5f8eff8ee47c34beb142ccd161bd2d3856da9420a" exitCode=0 Sep 30 21:09:18 crc kubenswrapper[4756]: I0930 21:09:18.427885 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerDied","Data":"86facede1871635eb01714c5f8eff8ee47c34beb142ccd161bd2d3856da9420a"} Sep 30 21:09:22 crc kubenswrapper[4756]: I0930 21:09:22.018191 4756 scope.go:117] "RemoveContainer" containerID="cb2a782ec168a65cb67068fb628fb42ac6ecd46b3f5508c0d07cf893caff3b3b" Sep 30 21:09:22 crc kubenswrapper[4756]: I0930 21:09:22.115745 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:09:22 crc kubenswrapper[4756]: E0930 21:09:22.115979 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.444738 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.507736 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-2m9fn" event={"ID":"cf3ed40b-5636-4917-82bc-b9c60bff311f","Type":"ContainerDied","Data":"50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801"} Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.507774 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50bd882f9c7a6de3dddfb802e3eb495c8710a6d1f5fbc64e5a3faecea1348801" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.507797 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-2m9fn" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.540071 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged\") pod \"cf3ed40b-5636-4917-82bc-b9c60bff311f\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.540182 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts\") pod \"cf3ed40b-5636-4917-82bc-b9c60bff311f\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.540276 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle\") pod \"cf3ed40b-5636-4917-82bc-b9c60bff311f\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.540306 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data\") pod \"cf3ed40b-5636-4917-82bc-b9c60bff311f\" (UID: \"cf3ed40b-5636-4917-82bc-b9c60bff311f\") " Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.550605 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data" (OuterVolumeSpecName: "config-data") pod "cf3ed40b-5636-4917-82bc-b9c60bff311f" (UID: "cf3ed40b-5636-4917-82bc-b9c60bff311f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.550741 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts" (OuterVolumeSpecName: "scripts") pod "cf3ed40b-5636-4917-82bc-b9c60bff311f" (UID: "cf3ed40b-5636-4917-82bc-b9c60bff311f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.566647 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "cf3ed40b-5636-4917-82bc-b9c60bff311f" (UID: "cf3ed40b-5636-4917-82bc-b9c60bff311f"). InnerVolumeSpecName "config-data-merged". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.575560 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf3ed40b-5636-4917-82bc-b9c60bff311f" (UID: "cf3ed40b-5636-4917-82bc-b9c60bff311f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.642588 4756 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data-merged\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.642617 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.642626 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:24 crc kubenswrapper[4756]: I0930 21:09:24.642636 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3ed40b-5636-4917-82bc-b9c60bff311f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:25 crc kubenswrapper[4756]: I0930 21:09:25.635311 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-wnndt"] Sep 30 21:09:26 crc kubenswrapper[4756]: I0930 21:09:26.541140 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-zjxht" event={"ID":"8e308d95-d34a-4264-86a0-d4bad47490de","Type":"ContainerStarted","Data":"f7d55b715079db9c422514fbcfb118d8b128cc4915ff2aaafed8a92c681487f4"} Sep 30 21:09:26 crc kubenswrapper[4756]: I0930 21:09:26.544126 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerStarted","Data":"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"} Sep 30 21:09:26 crc kubenswrapper[4756]: I0930 21:09:26.548734 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-wnndt" event={"ID":"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9","Type":"ContainerStarted","Data":"d853e6b7c7c36d0796dbdf5fb39449ecf362d7781ba73533438e4a1c8d11127c"} Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.561890 4756 generic.go:334] "Generic (PLEG): container finished" podID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerID="caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c" exitCode=0 Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.561950 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerDied","Data":"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"} Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.572131 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qzvbg" event={"ID":"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55","Type":"ContainerStarted","Data":"d9f1519550d85ced4a362dbc9eeaddca8839876d85d90653b3e51eee0abecbce"} Sep 30 
21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.581448 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-h4j74" event={"ID":"1be3323c-dbb7-42fe-a95d-56b44b70fc98","Type":"ContainerStarted","Data":"11a05b0696ecc3c7c36689f5b6727a97fd817e1c74052e45b58e76254e2bf5b0"}
Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.581554 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.581697 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-h4j74"
Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.628281 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-h4j74" podStartSLOduration=22.628256546 podStartE2EDuration="22.628256546s" podCreationTimestamp="2025-09-30 21:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:09:27.614078976 +0000 UTC m=+5897.235012453" watchObservedRunningTime="2025-09-30 21:09:27.628256546 +0000 UTC m=+5897.249190023"
Sep 30 21:09:27 crc kubenswrapper[4756]: I0930 21:09:27.632838 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-zjxht" podStartSLOduration=4.117720399 podStartE2EDuration="32.632822745s" podCreationTimestamp="2025-09-30 21:08:55 +0000 UTC" firstStartedPulling="2025-09-30 21:08:56.129797877 +0000 UTC m=+5865.750731344" lastFinishedPulling="2025-09-30 21:09:24.644900213 +0000 UTC m=+5894.265833690" observedRunningTime="2025-09-30 21:09:27.631612193 +0000 UTC m=+5897.252545670" watchObservedRunningTime="2025-09-30 21:09:27.632822745 +0000 UTC m=+5897.253756222"
Sep 30 21:09:28 crc kubenswrapper[4756]: I0930 21:09:28.051892 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-cb69-account-create-dhbgr"]
Sep 30 21:09:28 crc kubenswrapper[4756]: I0930 21:09:28.071685 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-cb69-account-create-dhbgr"]
Sep 30 21:09:29 crc kubenswrapper[4756]: I0930 21:09:29.129429 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6690dfee-cc9f-47c1-a102-6cd970d9dd9d" path="/var/lib/kubelet/pods/6690dfee-cc9f-47c1-a102-6cd970d9dd9d/volumes"
Sep 30 21:09:30 crc kubenswrapper[4756]: I0930 21:09:30.629497 4756 generic.go:334] "Generic (PLEG): container finished" podID="1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55" containerID="d9f1519550d85ced4a362dbc9eeaddca8839876d85d90653b3e51eee0abecbce" exitCode=0
Sep 30 21:09:30 crc kubenswrapper[4756]: I0930 21:09:30.629934 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qzvbg" event={"ID":"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55","Type":"ContainerDied","Data":"d9f1519550d85ced4a362dbc9eeaddca8839876d85d90653b3e51eee0abecbce"}
Sep 30 21:09:32 crc kubenswrapper[4756]: I0930 21:09:32.662028 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qzvbg" event={"ID":"1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55","Type":"ContainerStarted","Data":"be245444f8ce72bd74e650683fc1b889f0a2ebdc0864ef90330736df97f10c28"}
Sep 30 21:09:32 crc kubenswrapper[4756]: I0930 21:09:32.662797 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-qzvbg"
Sep 30 21:09:32 crc kubenswrapper[4756]: I0930 21:09:32.690216 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-qzvbg" podStartSLOduration=13.850384189 podStartE2EDuration="23.690146981s" podCreationTimestamp="2025-09-30 21:09:09 +0000 UTC" firstStartedPulling="2025-09-30 21:09:16.271788545 +0000 UTC m=+5885.892722032" lastFinishedPulling="2025-09-30 21:09:26.111551317 +0000 UTC m=+5895.732484824" observedRunningTime="2025-09-30 21:09:32.682225485 +0000 UTC m=+5902.303158992" watchObservedRunningTime="2025-09-30 21:09:32.690146981 +0000 UTC m=+5902.311080458"
Sep 30 21:09:33 crc kubenswrapper[4756]: I0930 21:09:33.678602 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-wnndt" event={"ID":"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9","Type":"ContainerStarted","Data":"f60f023bf1683c102d4636ff37f1e145382f1806614e811daa063ba57d23bb10"}
Sep 30 21:09:34 crc kubenswrapper[4756]: I0930 21:09:34.054826 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-fz2mj"]
Sep 30 21:09:34 crc kubenswrapper[4756]: I0930 21:09:34.067042 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-fz2mj"]
Sep 30 21:09:34 crc kubenswrapper[4756]: I0930 21:09:34.116306 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:09:34 crc kubenswrapper[4756]: E0930 21:09:34.116732 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:09:34 crc kubenswrapper[4756]: I0930 21:09:34.690495 4756 generic.go:334] "Generic (PLEG): container finished" podID="3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9" containerID="f60f023bf1683c102d4636ff37f1e145382f1806614e811daa063ba57d23bb10" exitCode=0
Sep 30 21:09:34 crc kubenswrapper[4756]: I0930 21:09:34.690540 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-wnndt" event={"ID":"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9","Type":"ContainerDied","Data":"f60f023bf1683c102d4636ff37f1e145382f1806614e811daa063ba57d23bb10"}
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.127286 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd45f86f-28b1-4422-b1f6-9a2f43e39ae3" path="/var/lib/kubelet/pods/bd45f86f-28b1-4422-b1f6-9a2f43e39ae3/volumes"
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.701698 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerStarted","Data":"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"}
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.706479 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-wnndt" event={"ID":"3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9","Type":"ContainerStarted","Data":"b2e1dcf4c3a14216ad1fedaa711f55d7fd1e2e7fa0bcd6667af99406e2e3d940"}
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.706663 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-wnndt"
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.720032 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" podStartSLOduration=1.9378270149999999 podStartE2EDuration="39.719650507s" podCreationTimestamp="2025-09-30 21:08:56 +0000 UTC" firstStartedPulling="2025-09-30 21:08:57.094377536 +0000 UTC m=+5866.715311013" lastFinishedPulling="2025-09-30 21:09:34.876201028 +0000 UTC m=+5904.497134505" observedRunningTime="2025-09-30 21:09:35.716919226 +0000 UTC m=+5905.337852703" watchObservedRunningTime="2025-09-30 21:09:35.719650507 +0000 UTC m=+5905.340583984"
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.763183 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-wnndt" podStartSLOduration=12.205540785 podStartE2EDuration="18.763166242s" podCreationTimestamp="2025-09-30 21:09:17 +0000 UTC" firstStartedPulling="2025-09-30 21:09:26.043592835 +0000 UTC m=+5895.664526312" lastFinishedPulling="2025-09-30 21:09:32.601218282 +0000 UTC m=+5902.222151769" observedRunningTime="2025-09-30 21:09:35.745977804 +0000 UTC m=+5905.366911291" watchObservedRunningTime="2025-09-30 21:09:35.763166242 +0000 UTC m=+5905.384099719"
Sep 30 21:09:35 crc kubenswrapper[4756]: I0930 21:09:35.951841 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-h4j74"
Sep 30 21:09:39 crc kubenswrapper[4756]: I0930 21:09:39.695068 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-qzvbg"
Sep 30 21:09:40 crc kubenswrapper[4756]: I0930 21:09:40.607806 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-zjxht"
Sep 30 21:09:47 crc kubenswrapper[4756]: I0930 21:09:47.116149 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:09:47 crc kubenswrapper[4756]: E0930 21:09:47.116896 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:09:48 crc kubenswrapper[4756]: I0930 21:09:48.271063 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-wnndt"
Sep 30 21:09:59 crc kubenswrapper[4756]: I0930 21:09:59.408512 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"]
Sep 30 21:09:59 crc kubenswrapper[4756]: I0930 21:09:59.409157 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="octavia-amphora-httpd" containerID="cri-o://d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35" gracePeriod=30
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.914185 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.962890 4756 generic.go:334] "Generic (PLEG): container finished" podID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerID="d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35" exitCode=0
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.962923 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerDied","Data":"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"}
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.962946 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-mwhmt" event={"ID":"25c22123-ad6e-4f1d-a4f9-334f0e701cbf","Type":"ContainerDied","Data":"a841d9dfe810beea655017a457358e1983968e4d97103888f78f50556a5d3a1f"}
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.962962 4756 scope.go:117] "RemoveContainer" containerID="d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.963086 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-mwhmt"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:09:59.987496 4756 scope.go:117] "RemoveContainer" containerID="caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.033096 4756 scope.go:117] "RemoveContainer" containerID="d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"
Sep 30 21:10:00 crc kubenswrapper[4756]: E0930 21:10:00.033905 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35\": container with ID starting with d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35 not found: ID does not exist" containerID="d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.033936 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35"} err="failed to get container status \"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35\": rpc error: code = NotFound desc = could not find container \"d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35\": container with ID starting with d0d98df1a69d8200a086af028743a5d6b3c0e99e253c343f697dc2088ff58a35 not found: ID does not exist"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.033954 4756 scope.go:117] "RemoveContainer" containerID="caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"
Sep 30 21:10:00 crc kubenswrapper[4756]: E0930 21:10:00.034194 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c\": container with ID starting with caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c not found: ID does not exist" containerID="caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.034209 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c"} err="failed to get container status \"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c\": rpc error: code = NotFound desc = could not find container \"caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c\": container with ID starting with caaf73fd9818fa892ca47879cc0a5931d58ca87178acebf5d78e30e93098418c not found: ID does not exist"
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.090574 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config\") pod \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") "
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.090810 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image\") pod \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\" (UID: \"25c22123-ad6e-4f1d-a4f9-334f0e701cbf\") "
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.154354 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "25c22123-ad6e-4f1d-a4f9-334f0e701cbf" (UID: "25c22123-ad6e-4f1d-a4f9-334f0e701cbf"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.191073 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "25c22123-ad6e-4f1d-a4f9-334f0e701cbf" (UID: "25c22123-ad6e-4f1d-a4f9-334f0e701cbf"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.204595 4756 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-amphora-image\") on node \"crc\" DevicePath \"\""
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.204625 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25c22123-ad6e-4f1d-a4f9-334f0e701cbf-httpd-config\") on node \"crc\" DevicePath \"\""
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.296194 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"]
Sep 30 21:10:00 crc kubenswrapper[4756]: I0930 21:10:00.310615 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-mwhmt"]
Sep 30 21:10:01 crc kubenswrapper[4756]: I0930 21:10:01.125193 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:10:01 crc kubenswrapper[4756]: E0930 21:10:01.125757 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:10:01 crc kubenswrapper[4756]: I0930 21:10:01.129656 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" path="/var/lib/kubelet/pods/25c22123-ad6e-4f1d-a4f9-334f0e701cbf/volumes"
Sep 30 21:10:04 crc kubenswrapper[4756]: I0930 21:10:04.047460 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-dq47d"]
Sep 30 21:10:04 crc kubenswrapper[4756]: I0930 21:10:04.058623 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-dq47d"]
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.077345 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dwt7s"]
Sep 30 21:10:05 crc kubenswrapper[4756]: E0930 21:10:05.078247 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerName="init"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.078268 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerName="init"
Sep 30 21:10:05 crc kubenswrapper[4756]: E0930 21:10:05.078305 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerName="octavia-db-sync"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.078315 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerName="octavia-db-sync"
Sep 30 21:10:05 crc kubenswrapper[4756]: E0930 21:10:05.078404 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="octavia-amphora-httpd"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.078418 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="octavia-amphora-httpd"
Sep 30 21:10:05 crc kubenswrapper[4756]: E0930 21:10:05.078447 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="init"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.078457 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="init"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.079035 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c22123-ad6e-4f1d-a4f9-334f0e701cbf" containerName="octavia-amphora-httpd"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.079103 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" containerName="octavia-db-sync"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.081488 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.085491 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.109128 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dwt7s"]
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.128052 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2985e22-daaf-45c5-8650-beac6a5bbc44" path="/var/lib/kubelet/pods/f2985e22-daaf-45c5-8650-beac6a5bbc44/volumes"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.158725 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/3091d122-9691-440c-a0a1-ed14393f46fc-amphora-image\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.159041 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3091d122-9691-440c-a0a1-ed14393f46fc-httpd-config\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.261101 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3091d122-9691-440c-a0a1-ed14393f46fc-httpd-config\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.261184 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/3091d122-9691-440c-a0a1-ed14393f46fc-amphora-image\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.261777 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/3091d122-9691-440c-a0a1-ed14393f46fc-amphora-image\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.269824 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3091d122-9691-440c-a0a1-ed14393f46fc-httpd-config\") pod \"octavia-image-upload-59f8cff499-dwt7s\" (UID: \"3091d122-9691-440c-a0a1-ed14393f46fc\") " pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.425306 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dwt7s"
Sep 30 21:10:05 crc kubenswrapper[4756]: I0930 21:10:05.964546 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dwt7s"]
Sep 30 21:10:06 crc kubenswrapper[4756]: I0930 21:10:06.042948 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dwt7s" event={"ID":"3091d122-9691-440c-a0a1-ed14393f46fc","Type":"ContainerStarted","Data":"b92589f7e73919619c55c71105c1f7c0eafd3df3a38a4d56514ca3de16668f13"}
Sep 30 21:10:07 crc kubenswrapper[4756]: I0930 21:10:07.056284 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dwt7s" event={"ID":"3091d122-9691-440c-a0a1-ed14393f46fc","Type":"ContainerStarted","Data":"37e02f7770eda9fff21f9228c77f0c5a9e2d1d37858abc8375878bcd1dfc3cef"}
Sep 30 21:10:12 crc kubenswrapper[4756]: I0930 21:10:12.101529 4756 generic.go:334] "Generic (PLEG): container finished" podID="3091d122-9691-440c-a0a1-ed14393f46fc" containerID="37e02f7770eda9fff21f9228c77f0c5a9e2d1d37858abc8375878bcd1dfc3cef" exitCode=0
Sep 30 21:10:12 crc kubenswrapper[4756]: I0930 21:10:12.101605 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dwt7s" event={"ID":"3091d122-9691-440c-a0a1-ed14393f46fc","Type":"ContainerDied","Data":"37e02f7770eda9fff21f9228c77f0c5a9e2d1d37858abc8375878bcd1dfc3cef"}
Sep 30 21:10:13 crc kubenswrapper[4756]: I0930 21:10:13.116265 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:10:13 crc kubenswrapper[4756]: E0930 21:10:13.116884 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:10:14 crc kubenswrapper[4756]: I0930 21:10:14.039886 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-4b1e-account-create-7q5gc"]
Sep 30 21:10:14 crc kubenswrapper[4756]: I0930 21:10:14.048986 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-4b1e-account-create-7q5gc"]
Sep 30 21:10:14 crc kubenswrapper[4756]: I0930 21:10:14.126428 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dwt7s" event={"ID":"3091d122-9691-440c-a0a1-ed14393f46fc","Type":"ContainerStarted","Data":"d72da9b59a4813c8584f7e58c57d8a1fcd4aec4de4849d4f8dba4439cae01e96"}
Sep 30 21:10:14 crc kubenswrapper[4756]: I0930 21:10:14.149123 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-dwt7s" podStartSLOduration=1.730908705 podStartE2EDuration="9.149104118s" podCreationTimestamp="2025-09-30 21:10:05 +0000 UTC" firstStartedPulling="2025-09-30 21:10:05.971179901 +0000 UTC m=+5935.592113378" lastFinishedPulling="2025-09-30 21:10:13.389375304 +0000 UTC m=+5943.010308791" observedRunningTime="2025-09-30 21:10:14.139566119 +0000 UTC m=+5943.760499586" watchObservedRunningTime="2025-09-30 21:10:14.149104118 +0000 UTC m=+5943.770037595"
Sep 30 21:10:15 crc kubenswrapper[4756]: I0930 21:10:15.131423 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ab97981-55b0-43e6-80b8-cce94d3c3b5c" path="/var/lib/kubelet/pods/1ab97981-55b0-43e6-80b8-cce94d3c3b5c/volumes"
Sep 30 21:10:23 crc kubenswrapper[4756]: I0930 21:10:23.048673 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-mshpd"]
Sep 30 21:10:23 crc kubenswrapper[4756]: I0930 21:10:23.065864 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-mshpd"]
Sep 30 21:10:23 crc kubenswrapper[4756]: I0930 21:10:23.128578 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d09fbc18-1295-4ffb-8e64-8dedd940711f" path="/var/lib/kubelet/pods/d09fbc18-1295-4ffb-8e64-8dedd940711f/volumes"
Sep 30 21:10:25 crc kubenswrapper[4756]: I0930 21:10:25.640930 4756 scope.go:117] "RemoveContainer" containerID="0c443525020f5b5a61558dbd6b4a09d54852337dcb65e03b38c4e7a0dd179a35"
Sep 30 21:10:25 crc kubenswrapper[4756]: I0930 21:10:25.671727 4756 scope.go:117] "RemoveContainer" containerID="fd14b88b951bc7a26b152f4798c9eacb17743f095cd8b957da0f4036e9c7a593"
Sep 30 21:10:25 crc kubenswrapper[4756]: I0930 21:10:25.725227 4756 scope.go:117] "RemoveContainer" containerID="6d97867e24f633c3054e9ac37ce8e0a69414d89e441a0f1a3cf4c9e51c33ce7e"
Sep 30 21:10:25 crc kubenswrapper[4756]: I0930 21:10:25.777369 4756 scope.go:117] "RemoveContainer" containerID="8f2c20459fb858d6582871d59bfc78f100424cb03945e62d93fd01506f14963e"
Sep 30 21:10:25 crc kubenswrapper[4756]: I0930 21:10:25.847068 4756 scope.go:117] "RemoveContainer" containerID="cfd59081ee6a99e2fda0bbc4cb7a10fd0722cc94af12462e6d648d51ee471c2b"
Sep 30 21:10:26 crc kubenswrapper[4756]: I0930 21:10:26.117386 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:10:26 crc kubenswrapper[4756]: E0930 21:10:26.117962 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.159866 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.165166 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.168012 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.218378 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.218450 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v9gv\" (UniqueName: \"kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.218508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.319649 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.319695 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v9gv\" (UniqueName: \"kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.319730 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.320425 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.320478 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.345750 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v9gv\" (UniqueName: \"kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv\") pod \"certified-operators-vjjbl\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") " pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.490813 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:31 crc kubenswrapper[4756]: I0930 21:10:31.888033 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:32 crc kubenswrapper[4756]: I0930 21:10:32.329431 4756 generic.go:334] "Generic (PLEG): container finished" podID="aa1031af-26bd-460f-8888-1459d36526ce" containerID="ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f" exitCode=0
Sep 30 21:10:32 crc kubenswrapper[4756]: I0930 21:10:32.329473 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerDied","Data":"ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f"}
Sep 30 21:10:32 crc kubenswrapper[4756]: I0930 21:10:32.330592 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerStarted","Data":"49c990fa5a9ca35d5cee23d9687d15eba3e5e381783253531476959a1f643828"}
Sep 30 21:10:33 crc kubenswrapper[4756]: I0930 21:10:33.342014 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerStarted","Data":"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"}
Sep 30 21:10:34 crc kubenswrapper[4756]: I0930 21:10:34.356425 4756 generic.go:334] "Generic (PLEG): container finished" podID="aa1031af-26bd-460f-8888-1459d36526ce" containerID="ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e" exitCode=0
Sep 30 21:10:34 crc kubenswrapper[4756]: I0930 21:10:34.356484 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerDied","Data":"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"}
Sep 30 21:10:35 crc kubenswrapper[4756]: I0930 21:10:35.372563 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerStarted","Data":"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"}
Sep 30 21:10:35 crc kubenswrapper[4756]: I0930 21:10:35.391100 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vjjbl" podStartSLOduration=1.91952511 podStartE2EDuration="4.391080234s" podCreationTimestamp="2025-09-30 21:10:31 +0000 UTC" firstStartedPulling="2025-09-30 21:10:32.331311809 +0000 UTC m=+5961.952245306" lastFinishedPulling="2025-09-30 21:10:34.802866933 +0000 UTC m=+5964.423800430" observedRunningTime="2025-09-30 21:10:35.390701284 +0000 UTC m=+5965.011634781" watchObservedRunningTime="2025-09-30 21:10:35.391080234 +0000 UTC m=+5965.012013731"
Sep 30 21:10:37 crc kubenswrapper[4756]: I0930 21:10:37.116954 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c"
Sep 30 21:10:37 crc kubenswrapper[4756]: E0930 21:10:37.117765 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:10:41 crc kubenswrapper[4756]: I0930 21:10:41.491770 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:41 crc kubenswrapper[4756]: I0930 21:10:41.492072 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:41 crc kubenswrapper[4756]: I0930 21:10:41.546241 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:42 crc kubenswrapper[4756]: I0930 21:10:42.502199 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:42 crc kubenswrapper[4756]: I0930 21:10:42.556442 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.460464 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vjjbl" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="registry-server" containerID="cri-o://9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e" gracePeriod=2
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.960432 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.996244 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content\") pod \"aa1031af-26bd-460f-8888-1459d36526ce\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") "
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.996293 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v9gv\" (UniqueName: \"kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv\") pod \"aa1031af-26bd-460f-8888-1459d36526ce\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") "
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.996464 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities\") pod \"aa1031af-26bd-460f-8888-1459d36526ce\" (UID: \"aa1031af-26bd-460f-8888-1459d36526ce\") "
Sep 30 21:10:44 crc kubenswrapper[4756]: I0930 21:10:44.997135 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities" (OuterVolumeSpecName: "utilities") pod "aa1031af-26bd-460f-8888-1459d36526ce" (UID: "aa1031af-26bd-460f-8888-1459d36526ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.004615 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv" (OuterVolumeSpecName: "kube-api-access-5v9gv") pod "aa1031af-26bd-460f-8888-1459d36526ce" (UID: "aa1031af-26bd-460f-8888-1459d36526ce"). InnerVolumeSpecName "kube-api-access-5v9gv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.044093 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa1031af-26bd-460f-8888-1459d36526ce" (UID: "aa1031af-26bd-460f-8888-1459d36526ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.098685 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.098719 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa1031af-26bd-460f-8888-1459d36526ce-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.098729 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v9gv\" (UniqueName: \"kubernetes.io/projected/aa1031af-26bd-460f-8888-1459d36526ce-kube-api-access-5v9gv\") on node \"crc\" DevicePath \"\""
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.473039 4756 generic.go:334] "Generic (PLEG): container finished" podID="aa1031af-26bd-460f-8888-1459d36526ce" containerID="9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e" exitCode=0
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.473103 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjjbl"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.473122 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerDied","Data":"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"}
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.474133 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjjbl" event={"ID":"aa1031af-26bd-460f-8888-1459d36526ce","Type":"ContainerDied","Data":"49c990fa5a9ca35d5cee23d9687d15eba3e5e381783253531476959a1f643828"}
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.474172 4756 scope.go:117] "RemoveContainer" containerID="9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.501609 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.514074 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vjjbl"]
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.514286 4756 scope.go:117] "RemoveContainer" containerID="ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.545461 4756 scope.go:117] "RemoveContainer" containerID="ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.585158 4756 scope.go:117] "RemoveContainer" containerID="9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"
Sep 30 21:10:45 crc kubenswrapper[4756]: E0930 21:10:45.586174 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e\": container with ID starting with 9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e not found: ID does not exist" containerID="9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.586250 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e"} err="failed to get container status \"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e\": rpc error: code = NotFound desc = could not find container \"9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e\": container with ID starting with 9ac95ad10d2cbd433e4a1a487a0265fafa8537b37723afdc1ab5380a4e7ac77e not found: ID does not exist"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.586294 4756 scope.go:117] "RemoveContainer" containerID="ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"
Sep 30 21:10:45 crc kubenswrapper[4756]: E0930 21:10:45.586734 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e\": container with ID starting with ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e not found: ID does not exist" containerID="ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.586788 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e"} err="failed to get container status \"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e\": rpc error: code = NotFound desc = could not find container \"ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e\": container with ID starting with ca2440c5173ad3545b7b9900642c6ca9e51a971192b3115787721875c982f25e not found: ID does not exist"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.586822 4756 scope.go:117] "RemoveContainer" containerID="ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f"
Sep 30 21:10:45 crc kubenswrapper[4756]: E0930 21:10:45.587124 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f\": container with ID starting with ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f not found: ID does not exist" containerID="ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f"
Sep 30 21:10:45 crc kubenswrapper[4756]: I0930 21:10:45.587161 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f"} err="failed to get container status \"ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f\": rpc error: code = NotFound desc = could not find container \"ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f\": container with ID starting with ef680eea6e60bfdee1c8a964c8ca47ff83f4dab5eb3346a860b983e29a8c1b6f not found: ID does not exist"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.869624 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"]
Sep 30 21:10:46 crc kubenswrapper[4756]: E0930 21:10:46.870261 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="extract-content"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.870273 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="extract-content"
Sep 30 21:10:46 crc kubenswrapper[4756]: E0930 21:10:46.870289 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="extract-utilities"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.870295 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="extract-utilities"
Sep 30 21:10:46 crc kubenswrapper[4756]: E0930 21:10:46.870318 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="registry-server"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.870325 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="registry-server"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.870573 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa1031af-26bd-460f-8888-1459d36526ce" containerName="registry-server"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.872557 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.875064 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-mrcl7"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.875387 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.875660 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.875793 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.901053 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"]
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.939379 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.939439 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.939625 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.939691 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.939850 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls6v2\" (UniqueName: \"kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.942806 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.943475 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-log" containerID="cri-o://85b0b4211214b1b0940d27fac90ef0fc7dadfc4c7afc5981e944a56f3e595727" gracePeriod=30
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.943526 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-httpd" containerID="cri-o://1ed06ca741613b448b386d339a6050885134868e51bebd7fb347b43ca5e6c977" gracePeriod=30
Sep 30 21:10:46 crc kubenswrapper[4756]: I0930 21:10:46.995461 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-658f995c57-xqjhp"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.004079 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.017455 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.017701 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-log" containerID="cri-o://4ae49e1404ac093ad727eb55bcadf7520d4b489934cc4899c2ab03d72ed55621" gracePeriod=30
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.017845 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-httpd" containerID="cri-o://30e5fe0a52e1838b034a83f46a24e63f375c89cd78121be89f384bc094ce4abc" gracePeriod=30
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.039276 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-658f995c57-xqjhp"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042291 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042536 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042615 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042714 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042794 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042869 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.042955 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.043567 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls6v2\" (UniqueName: \"kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.043876 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm4b6\" (UniqueName: \"kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.044233 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.046049 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.046050 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.046592 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.053814 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.074991 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls6v2\" (UniqueName: \"kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2\") pod \"horizon-557fc8f6cc-z8kp5\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.127986 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa1031af-26bd-460f-8888-1459d36526ce" path="/var/lib/kubelet/pods/aa1031af-26bd-460f-8888-1459d36526ce/volumes"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.146229 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.146303 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.146371 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm4b6\" (UniqueName: \"kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.146427 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.146475 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.147167 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.147593 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.147998 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.151552 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.162338 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm4b6\" (UniqueName: \"kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6\") pod \"horizon-658f995c57-xqjhp\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.218646 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-557fc8f6cc-z8kp5"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.331863 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-658f995c57-xqjhp"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.503752 4756 generic.go:334] "Generic (PLEG): container finished" podID="45b49577-8ba6-43a8-86be-987744002d0c" containerID="85b0b4211214b1b0940d27fac90ef0fc7dadfc4c7afc5981e944a56f3e595727" exitCode=143
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.503813 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerDied","Data":"85b0b4211214b1b0940d27fac90ef0fc7dadfc4c7afc5981e944a56f3e595727"}
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.506447 4756 generic.go:334] "Generic (PLEG): container finished" podID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerID="4ae49e1404ac093ad727eb55bcadf7520d4b489934cc4899c2ab03d72ed55621" exitCode=143
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.506479 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerDied","Data":"4ae49e1404ac093ad727eb55bcadf7520d4b489934cc4899c2ab03d72ed55621"}
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.611417 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.667802 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.687579 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.687621 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.688141 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.703688 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"]
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.765730 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.765791 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s85qt\" (UniqueName: \"kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.765873 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.765896 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.765922 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.827290 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-658f995c57-xqjhp"]
Sep 30 21:10:47 crc kubenswrapper[4756]: W0930 21:10:47.830017 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd456def0_4660_4a2b_ac99_c664da4d2d5d.slice/crio-705be0fef9cc69209b8c112bb236515c8e83058280fcd30e131d37d4375fe010 WatchSource:0}: Error finding container 705be0fef9cc69209b8c112bb236515c8e83058280fcd30e131d37d4375fe010: Status 404 returned error can't find the container with id 705be0fef9cc69209b8c112bb236515c8e83058280fcd30e131d37d4375fe010
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.867607 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.867667 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.867705 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.867823 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.867867 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s85qt\" (UniqueName: \"kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.868179 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.868685 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.869450 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.875880 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:47 crc kubenswrapper[4756]: I0930 21:10:47.887621 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s85qt\" (UniqueName: \"kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt\") pod \"horizon-756d7594c9-2dtwc\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") " pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:10:48 crc kubenswrapper[4756]: I0930 21:10:48.015788 4756 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/horizon-756d7594c9-2dtwc" Sep 30 21:10:48 crc kubenswrapper[4756]: I0930 21:10:48.476269 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"] Sep 30 21:10:48 crc kubenswrapper[4756]: W0930 21:10:48.485779 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod871a70bc_3a8f_4615_9e8f_cd949ec43e0d.slice/crio-95a756160987bfaf98b48e3b195b7338bf2af269c6b4a13f823ee49f9614260b WatchSource:0}: Error finding container 95a756160987bfaf98b48e3b195b7338bf2af269c6b4a13f823ee49f9614260b: Status 404 returned error can't find the container with id 95a756160987bfaf98b48e3b195b7338bf2af269c6b4a13f823ee49f9614260b Sep 30 21:10:48 crc kubenswrapper[4756]: I0930 21:10:48.522684 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerStarted","Data":"95a756160987bfaf98b48e3b195b7338bf2af269c6b4a13f823ee49f9614260b"} Sep 30 21:10:48 crc kubenswrapper[4756]: I0930 21:10:48.525042 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerStarted","Data":"705be0fef9cc69209b8c112bb236515c8e83058280fcd30e131d37d4375fe010"} Sep 30 21:10:48 crc kubenswrapper[4756]: I0930 21:10:48.526299 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-557fc8f6cc-z8kp5" event={"ID":"27f3702e-5145-4646-84e8-a470d2ae02ae","Type":"ContainerStarted","Data":"38ae9f437284af7a11d2e05d094f88b0b0b548f8035f376c94a33eb8d1cc2758"} Sep 30 21:10:49 crc kubenswrapper[4756]: I0930 21:10:49.116475 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:10:49 crc kubenswrapper[4756]: E0930 21:10:49.116764 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:10:51 crc kubenswrapper[4756]: I0930 21:10:51.558284 4756 generic.go:334] "Generic (PLEG): container finished" podID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerID="30e5fe0a52e1838b034a83f46a24e63f375c89cd78121be89f384bc094ce4abc" exitCode=0 Sep 30 21:10:51 crc kubenswrapper[4756]: I0930 21:10:51.558567 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerDied","Data":"30e5fe0a52e1838b034a83f46a24e63f375c89cd78121be89f384bc094ce4abc"} Sep 30 21:10:51 crc kubenswrapper[4756]: I0930 21:10:51.561899 4756 generic.go:334] "Generic (PLEG): container finished" podID="45b49577-8ba6-43a8-86be-987744002d0c" containerID="1ed06ca741613b448b386d339a6050885134868e51bebd7fb347b43ca5e6c977" exitCode=0 Sep 30 21:10:51 crc kubenswrapper[4756]: I0930 21:10:51.561933 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerDied","Data":"1ed06ca741613b448b386d339a6050885134868e51bebd7fb347b43ca5e6c977"} Sep 30 21:10:57 crc kubenswrapper[4756]: 
I0930 21:10:57.895325 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:10:57 crc kubenswrapper[4756]: I0930 21:10:57.899111 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:57 crc kubenswrapper[4756]: I0930 21:10:57.918341 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:10:57 crc kubenswrapper[4756]: I0930 21:10:57.993320 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:57 crc kubenswrapper[4756]: I0930 21:10:57.993470 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm44j\" (UniqueName: \"kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:57 crc kubenswrapper[4756]: I0930 21:10:57.993513 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.095471 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm44j\" (UniqueName: \"kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.095524 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.095618 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.096099 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.096255 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.121216 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm44j\" (UniqueName: \"kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j\") pod \"community-operators-sbssg\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:58 crc kubenswrapper[4756]: I0930 21:10:58.235049 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.665831 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"45b49577-8ba6-43a8-86be-987744002d0c","Type":"ContainerDied","Data":"cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748"} Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.666125 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd01ffb157dd6c79ed8a3381f61ba28d5f3d69d847936f3f5e5e5a444dff8748" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.669164 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6","Type":"ContainerDied","Data":"b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970"} Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.669204 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6d205723e0951dd271d700836757a772aaafa8cf4367532d5ec72a7513ae970" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.670292 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.680733 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754567 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754639 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754680 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754703 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754764 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754795 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754820 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754836 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754881 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754905 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 
21:10:59.754931 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754962 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxnbp\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.754992 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzbvv\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv\") pod \"45b49577-8ba6-43a8-86be-987744002d0c\" (UID: \"45b49577-8ba6-43a8-86be-987744002d0c\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.755024 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run\") pod \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\" (UID: \"0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6\") " Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.755968 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.762329 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs" (OuterVolumeSpecName: "logs") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.762384 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.762503 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph" (OuterVolumeSpecName: "ceph") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.762655 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp" (OuterVolumeSpecName: "kube-api-access-gxnbp") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "kube-api-access-gxnbp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.762738 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs" (OuterVolumeSpecName: "logs") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.766913 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph" (OuterVolumeSpecName: "ceph") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.777760 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts" (OuterVolumeSpecName: "scripts") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.803013 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts" (OuterVolumeSpecName: "scripts") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.803815 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv" (OuterVolumeSpecName: "kube-api-access-kzbvv") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "kube-api-access-kzbvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.811720 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.824663 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.836359 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data" (OuterVolumeSpecName: "config-data") pod "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" (UID: "0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.840717 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data" (OuterVolumeSpecName: "config-data") pod "45b49577-8ba6-43a8-86be-987744002d0c" (UID: "45b49577-8ba6-43a8-86be-987744002d0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.857985 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzbvv\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-kube-api-access-kzbvv\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858016 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858028 4756 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45b49577-8ba6-43a8-86be-987744002d0c-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858036 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858045 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858052 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858063 4756 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/45b49577-8ba6-43a8-86be-987744002d0c-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858073 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858082 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858091 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858100 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858110 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b49577-8ba6-43a8-86be-987744002d0c-config-data\") 
on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858121 4756 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: I0930 21:10:59.858131 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxnbp\" (UniqueName: \"kubernetes.io/projected/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6-kube-api-access-gxnbp\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:59 crc kubenswrapper[4756]: E0930 21:10:59.898331 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 21:10:59 crc kubenswrapper[4756]: E0930 21:10:59.898495 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n665h669h597h59dh5f5h689h8bh6dhbch57ch96hfbh5fbhc5h5ffh5c7h9fh59hcch544h68fh66h5chbfh5cch657h55bh55ch689h4h565h58dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wm4b6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-658f995c57-xqjhp_openstack(d456def0-4660-4a2b-ac99-c664da4d2d5d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 21:10:59 crc kubenswrapper[4756]: E0930 21:10:59.901339 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-658f995c57-xqjhp" 
podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.056071 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:11:00 crc kubenswrapper[4756]: W0930 21:11:00.058770 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec76931d_297e_4e71_9d01_f6acc920b97a.slice/crio-c824e7cb272b621a2253907de46c9dc5dbe27f5c925d53ec433beef5ba2fb23e WatchSource:0}: Error finding container c824e7cb272b621a2253907de46c9dc5dbe27f5c925d53ec433beef5ba2fb23e: Status 404 returned error can't find the container with id c824e7cb272b621a2253907de46c9dc5dbe27f5c925d53ec433beef5ba2fb23e Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.396199 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.397951 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h66fh5c9h579h66bh665h587h54dh57dhd9h599h57bh54dh5h5bdh5c9hddhddh5fbh564h5d8h8dh59fh668h5d9h55fhdchc4h94h655h59dh5fdq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s85qt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-756d7594c9-2dtwc_openstack(871a70bc-3a8f-4615-9e8f-cd949ec43e0d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.400253 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for 
\"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.606178 4756 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.606329 4756 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf7h87h5b6h668h679h658h559h574hd9h5f5h68dh5c5h575h5h55ch9bh556hdbh694h54fh577h67h5f9h57h569h64ch64ch556hb4h5b8h684h68cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ls6v2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-557fc8f6cc-z8kp5_openstack(27f3702e-5145-4646-84e8-a470d2ae02ae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.610103 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-557fc8f6cc-z8kp5" podUID="27f3702e-5145-4646-84e8-a470d2ae02ae" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.677880 4756 generic.go:334] "Generic (PLEG): container finished" podID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerID="eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc" exitCode=0 Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 
21:11:00.678065 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.678977 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.680320 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerDied","Data":"eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc"} Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.680459 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerStarted","Data":"c824e7cb272b621a2253907de46c9dc5dbe27f5c925d53ec433beef5ba2fb23e"} Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.681534 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.681670 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.801622 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.820323 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.832886 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.843800 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.854595 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.855143 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855172 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.855202 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-httpd" Sep 30 21:11:00 crc 
kubenswrapper[4756]: I0930 21:11:00.855211 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-httpd" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.855247 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855256 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: E0930 21:11:00.855265 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-httpd" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855272 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-httpd" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855512 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-httpd" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855555 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855573 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-log" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.855590 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-httpd" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.856878 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.860888 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.861162 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.861270 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2hdft" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.875522 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.892552 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.894274 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.895841 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.902066 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.976903 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.976963 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dch6\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-kube-api-access-6dch6\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.976999 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977014 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-logs\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977028 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977056 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977073 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-ceph\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977091 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977123 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977151 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977184 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977242 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrbft\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-kube-api-access-mrbft\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977289 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-logs\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:00 crc kubenswrapper[4756]: I0930 21:11:00.977316 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080050 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080295 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080354 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dch6\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-kube-api-access-6dch6\") pod 
\"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080416 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-logs\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080440 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080514 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080630 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080680 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-ceph\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080712 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-config-data\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080767 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080818 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080849 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080886 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrbft\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-kube-api-access-mrbft\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.080948 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-logs\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.081267 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.081342 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-logs\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.081345 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fb95e15-f324-4549-959e-2f3edba3bce3-logs\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.081712 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50ebcdc4-6975-4b2f-b08a-b09034017b42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.086987 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.087072 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.087920 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.088956 4756 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.090218 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50ebcdc4-6975-4b2f-b08a-b09034017b42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.090443 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.091793 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-557fc8f6cc-z8kp5" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.092728 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fb95e15-f324-4549-959e-2f3edba3bce3-config-data\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.093745 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-ceph\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.099881 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dch6\" (UniqueName: \"kubernetes.io/projected/7fb95e15-f324-4549-959e-2f3edba3bce3-kube-api-access-6dch6\") pod \"glance-default-external-api-0\" (UID: \"7fb95e15-f324-4549-959e-2f3edba3bce3\") " pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.112856 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrbft\" (UniqueName: \"kubernetes.io/projected/50ebcdc4-6975-4b2f-b08a-b09034017b42-kube-api-access-mrbft\") pod \"glance-default-internal-api-0\" (UID: \"50ebcdc4-6975-4b2f-b08a-b09034017b42\") " pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.127046 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6" path="/var/lib/kubelet/pods/0ed70cdf-5345-4faf-ba3b-2ca0f06fb3d6/volumes" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.127864 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45b49577-8ba6-43a8-86be-987744002d0c" path="/var/lib/kubelet/pods/45b49577-8ba6-43a8-86be-987744002d0c/volumes" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182248 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs\") pod \"27f3702e-5145-4646-84e8-a470d2ae02ae\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " Sep 
30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182313 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls6v2\" (UniqueName: \"kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2\") pod \"27f3702e-5145-4646-84e8-a470d2ae02ae\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182412 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data\") pod \"27f3702e-5145-4646-84e8-a470d2ae02ae\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182493 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key\") pod \"27f3702e-5145-4646-84e8-a470d2ae02ae\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182563 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts\") pod \"27f3702e-5145-4646-84e8-a470d2ae02ae\" (UID: \"27f3702e-5145-4646-84e8-a470d2ae02ae\") " Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.182663 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs" (OuterVolumeSpecName: "logs") pod "27f3702e-5145-4646-84e8-a470d2ae02ae" (UID: "27f3702e-5145-4646-84e8-a470d2ae02ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.183289 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f3702e-5145-4646-84e8-a470d2ae02ae-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.183339 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data" (OuterVolumeSpecName: "config-data") pod "27f3702e-5145-4646-84e8-a470d2ae02ae" (UID: "27f3702e-5145-4646-84e8-a470d2ae02ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.184117 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts" (OuterVolumeSpecName: "scripts") pod "27f3702e-5145-4646-84e8-a470d2ae02ae" (UID: "27f3702e-5145-4646-84e8-a470d2ae02ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.186632 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "27f3702e-5145-4646-84e8-a470d2ae02ae" (UID: "27f3702e-5145-4646-84e8-a470d2ae02ae"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.186694 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2" (OuterVolumeSpecName: "kube-api-access-ls6v2") pod "27f3702e-5145-4646-84e8-a470d2ae02ae" (UID: "27f3702e-5145-4646-84e8-a470d2ae02ae"). InnerVolumeSpecName "kube-api-access-ls6v2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.193180 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.220847 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.285677 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls6v2\" (UniqueName: \"kubernetes.io/projected/27f3702e-5145-4646-84e8-a470d2ae02ae-kube-api-access-ls6v2\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.285946 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.285965 4756 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27f3702e-5145-4646-84e8-a470d2ae02ae-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.285976 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27f3702e-5145-4646-84e8-a470d2ae02ae-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.686759 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-557fc8f6cc-z8kp5" event={"ID":"27f3702e-5145-4646-84e8-a470d2ae02ae","Type":"ContainerDied","Data":"38ae9f437284af7a11d2e05d094f88b0b0b548f8035f376c94a33eb8d1cc2758"} Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.686868 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-557fc8f6cc-z8kp5" Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.741063 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"] Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.748752 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-557fc8f6cc-z8kp5"] Sep 30 21:11:01 crc kubenswrapper[4756]: I0930 21:11:01.775700 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 21:11:01 crc kubenswrapper[4756]: W0930 21:11:01.782018 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fb95e15_f324_4549_959e_2f3edba3bce3.slice/crio-f8e37289b9a45a52d7a24820209c4f7000ec82fbec0ba2ed8bafbdebcf6898eb WatchSource:0}: Error finding container f8e37289b9a45a52d7a24820209c4f7000ec82fbec0ba2ed8bafbdebcf6898eb: Status 404 returned error can't find the container with id f8e37289b9a45a52d7a24820209c4f7000ec82fbec0ba2ed8bafbdebcf6898eb Sep 30 21:11:02 crc kubenswrapper[4756]: W0930 21:11:02.612956 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50ebcdc4_6975_4b2f_b08a_b09034017b42.slice/crio-8ca70aa65f4cadc836f20b0da875be82c9606ef29cf17b443cc14b4a9f8e339f WatchSource:0}: Error finding container 8ca70aa65f4cadc836f20b0da875be82c9606ef29cf17b443cc14b4a9f8e339f: Status 404 returned error can't find the container with id 8ca70aa65f4cadc836f20b0da875be82c9606ef29cf17b443cc14b4a9f8e339f Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.619573 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.705610 4756 generic.go:334] "Generic (PLEG): container finished" podID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerID="06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2" exitCode=0 Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.705676 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerDied","Data":"06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2"} Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.730425 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fb95e15-f324-4549-959e-2f3edba3bce3","Type":"ContainerStarted","Data":"fb71aeda783fc1bdafbc90d0ca36a69e9a2f4b5f33ae7857c0a2bfe074a11d9c"} Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.730473 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fb95e15-f324-4549-959e-2f3edba3bce3","Type":"ContainerStarted","Data":"f8e37289b9a45a52d7a24820209c4f7000ec82fbec0ba2ed8bafbdebcf6898eb"} Sep 30 21:11:02 crc kubenswrapper[4756]: I0930 21:11:02.734920 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50ebcdc4-6975-4b2f-b08a-b09034017b42","Type":"ContainerStarted","Data":"8ca70aa65f4cadc836f20b0da875be82c9606ef29cf17b443cc14b4a9f8e339f"} Sep 30 21:11:03 crc kubenswrapper[4756]: I0930 21:11:03.127926 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f3702e-5145-4646-84e8-a470d2ae02ae" 
path="/var/lib/kubelet/pods/27f3702e-5145-4646-84e8-a470d2ae02ae/volumes" Sep 30 21:11:03 crc kubenswrapper[4756]: I0930 21:11:03.752715 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fb95e15-f324-4549-959e-2f3edba3bce3","Type":"ContainerStarted","Data":"8f2c79b321fa2e4eda4a5ff8e409f2d9fb57acc24657b4f1911ac8a2ceb96731"} Sep 30 21:11:03 crc kubenswrapper[4756]: I0930 21:11:03.758239 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50ebcdc4-6975-4b2f-b08a-b09034017b42","Type":"ContainerStarted","Data":"41af5e9c1caec3d1d5df43ea19c749c7b5308168a83dd494bd961c2d0409f9dc"} Sep 30 21:11:03 crc kubenswrapper[4756]: I0930 21:11:03.789098 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.789078296 podStartE2EDuration="3.789078296s" podCreationTimestamp="2025-09-30 21:11:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:11:03.773267984 +0000 UTC m=+5993.394201471" watchObservedRunningTime="2025-09-30 21:11:03.789078296 +0000 UTC m=+5993.410011773" Sep 30 21:11:04 crc kubenswrapper[4756]: I0930 21:11:04.116458 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:11:04 crc kubenswrapper[4756]: E0930 21:11:04.116662 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:11:04 crc kubenswrapper[4756]: I0930 21:11:04.770265 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50ebcdc4-6975-4b2f-b08a-b09034017b42","Type":"ContainerStarted","Data":"098d27169faccdfe165845b41b75d22336bd57fcf497333df3638a3b97448de5"} Sep 30 21:11:04 crc kubenswrapper[4756]: I0930 21:11:04.773580 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerStarted","Data":"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd"} Sep 30 21:11:04 crc kubenswrapper[4756]: I0930 21:11:04.804353 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.804332156 podStartE2EDuration="4.804332156s" podCreationTimestamp="2025-09-30 21:11:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:11:04.796767778 +0000 UTC m=+5994.417701295" watchObservedRunningTime="2025-09-30 21:11:04.804332156 +0000 UTC m=+5994.425265633" Sep 30 21:11:04 crc kubenswrapper[4756]: I0930 21:11:04.817595 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sbssg" podStartSLOduration=5.04132202 podStartE2EDuration="7.817571651s" podCreationTimestamp="2025-09-30 21:10:57 +0000 UTC" firstStartedPulling="2025-09-30 21:11:00.681285918 +0000 UTC m=+5990.302219395" 
lastFinishedPulling="2025-09-30 21:11:03.457535549 +0000 UTC m=+5993.078469026" observedRunningTime="2025-09-30 21:11:04.813534736 +0000 UTC m=+5994.434468213" watchObservedRunningTime="2025-09-30 21:11:04.817571651 +0000 UTC m=+5994.438505128" Sep 30 21:11:05 crc kubenswrapper[4756]: I0930 21:11:05.038887 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-t48lv"] Sep 30 21:11:05 crc kubenswrapper[4756]: I0930 21:11:05.049827 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-t48lv"] Sep 30 21:11:05 crc kubenswrapper[4756]: I0930 21:11:05.129156 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4bfb72-efc3-44b1-9467-f61964ed636a" path="/var/lib/kubelet/pods/7b4bfb72-efc3-44b1-9467-f61964ed636a/volumes" Sep 30 21:11:08 crc kubenswrapper[4756]: I0930 21:11:08.236568 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:08 crc kubenswrapper[4756]: I0930 21:11:08.236887 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:08 crc kubenswrapper[4756]: I0930 21:11:08.295850 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:08 crc kubenswrapper[4756]: I0930 21:11:08.873424 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:08 crc kubenswrapper[4756]: I0930 21:11:08.936288 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:11:10 crc kubenswrapper[4756]: I0930 21:11:10.833705 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sbssg" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="registry-server" containerID="cri-o://f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd" gracePeriod=2 Sep 30 21:11:11 crc kubenswrapper[4756]: E0930 21:11:11.139336 4756 info.go:109] Failed to get network devices: open /sys/class/net/c824e7cb272b621/address: no such file or directory Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.194734 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.194785 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.221653 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.221687 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.242771 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.261176 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.290054 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.295317 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.343019 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.406067 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content\") pod \"ec76931d-297e-4e71-9d01-f6acc920b97a\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.406494 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities\") pod \"ec76931d-297e-4e71-9d01-f6acc920b97a\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.406535 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm44j\" (UniqueName: \"kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j\") pod \"ec76931d-297e-4e71-9d01-f6acc920b97a\" (UID: \"ec76931d-297e-4e71-9d01-f6acc920b97a\") " Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.411069 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities" (OuterVolumeSpecName: "utilities") pod "ec76931d-297e-4e71-9d01-f6acc920b97a" (UID: "ec76931d-297e-4e71-9d01-f6acc920b97a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.414717 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j" (OuterVolumeSpecName: "kube-api-access-jm44j") pod "ec76931d-297e-4e71-9d01-f6acc920b97a" (UID: "ec76931d-297e-4e71-9d01-f6acc920b97a"). InnerVolumeSpecName "kube-api-access-jm44j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.465958 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec76931d-297e-4e71-9d01-f6acc920b97a" (UID: "ec76931d-297e-4e71-9d01-f6acc920b97a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.509221 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.509477 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec76931d-297e-4e71-9d01-f6acc920b97a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.509570 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm44j\" (UniqueName: \"kubernetes.io/projected/ec76931d-297e-4e71-9d01-f6acc920b97a-kube-api-access-jm44j\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.865448 4756 generic.go:334] "Generic (PLEG): container finished" podID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerID="f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd" exitCode=0 Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.865919 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerDied","Data":"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd"} Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.865988 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbssg" event={"ID":"ec76931d-297e-4e71-9d01-f6acc920b97a","Type":"ContainerDied","Data":"c824e7cb272b621a2253907de46c9dc5dbe27f5c925d53ec433beef5ba2fb23e"} Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.866027 4756 scope.go:117] "RemoveContainer" containerID="f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.866816 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sbssg" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.867180 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.867201 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.867211 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.867219 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.901107 4756 scope.go:117] "RemoveContainer" containerID="06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.931480 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.936667 4756 scope.go:117] "RemoveContainer" containerID="eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.941849 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sbssg"] Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.987781 4756 scope.go:117] "RemoveContainer" containerID="f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd" Sep 30 21:11:11 crc kubenswrapper[4756]: E0930 21:11:11.988235 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd\": container with ID starting with f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd not found: ID does not exist" containerID="f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.988277 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd"} err="failed to get container status \"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd\": rpc error: code = NotFound desc = could not find container \"f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd\": container with ID starting with f46f99052b9c1eac4030adfb517c27fc32e738b711c3caf454db848889c275fd not found: ID does not exist" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.988417 4756 scope.go:117] "RemoveContainer" containerID="06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2" Sep 30 21:11:11 crc kubenswrapper[4756]: E0930 21:11:11.988935 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2\": container with ID starting with 06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2 not found: ID does not exist" containerID="06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.988958 4756 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2"} err="failed to get container status \"06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2\": rpc error: code = NotFound desc = could not find container \"06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2\": container with ID starting with 06b40b0b6295d8736eb7054faf89109fe641f1f3504ad4a7be88d0b04ee396c2 not found: ID does not exist" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.988971 4756 scope.go:117] "RemoveContainer" containerID="eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc" Sep 30 21:11:11 crc kubenswrapper[4756]: E0930 21:11:11.989934 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc\": container with ID starting with eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc not found: ID does not exist" containerID="eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc" Sep 30 21:11:11 crc kubenswrapper[4756]: I0930 21:11:11.989958 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc"} err="failed to get container status \"eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc\": rpc error: code = NotFound desc = could not find container \"eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc\": container with ID starting with eb598581ae8bb0b14a404ad8c9bd9645d686ee9947a73afef4219edb6699c0dc not found: ID does not exist" Sep 30 21:11:12 crc kubenswrapper[4756]: I0930 21:11:12.879838 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerStarted","Data":"02c1b1d1502b8ba9bd7558d92dac4acf88306a6e9fc91c9690dd4d238eb88215"} Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.136204 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" path="/var/lib/kubelet/pods/ec76931d-297e-4e71-9d01-f6acc920b97a/volumes" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.811969 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.827615 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.894116 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerStarted","Data":"4cbfc1b6152514ccc4d615a578b90468283a6e9192fd72681a6ee5847ca620b0"} Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.894706 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.894771 4756 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.917368 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-658f995c57-xqjhp" podStartSLOduration=3.185305109 podStartE2EDuration="27.917348633s" podCreationTimestamp="2025-09-30 21:10:46 +0000 UTC" firstStartedPulling="2025-09-30 
21:10:47.832600887 +0000 UTC m=+5977.453534364" lastFinishedPulling="2025-09-30 21:11:12.564644411 +0000 UTC m=+6002.185577888" observedRunningTime="2025-09-30 21:11:13.916721127 +0000 UTC m=+6003.537654624" watchObservedRunningTime="2025-09-30 21:11:13.917348633 +0000 UTC m=+6003.538282110" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.925728 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:13 crc kubenswrapper[4756]: I0930 21:11:13.999748 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 21:11:14 crc kubenswrapper[4756]: I0930 21:11:14.910150 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerStarted","Data":"2aae91d6f3588824470b4dd5d045c732e82503a683861c4f01ed7fc3db7aae02"} Sep 30 21:11:14 crc kubenswrapper[4756]: I0930 21:11:14.910592 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerStarted","Data":"dc88469e8e7314e8896805fba7912f59b27b211f5411372a9fc6da3a5f53be50"} Sep 30 21:11:14 crc kubenswrapper[4756]: I0930 21:11:14.934505 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-756d7594c9-2dtwc" podStartSLOduration=-9223372008.920294 podStartE2EDuration="27.934482081s" podCreationTimestamp="2025-09-30 21:10:47 +0000 UTC" firstStartedPulling="2025-09-30 21:10:48.488081003 +0000 UTC m=+5978.109014500" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:11:14.930633871 +0000 UTC m=+6004.551567418" watchObservedRunningTime="2025-09-30 21:11:14.934482081 +0000 UTC m=+6004.555415578" Sep 30 21:11:15 crc kubenswrapper[4756]: I0930 21:11:15.026871 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-7b75-account-create-xgm9w"] Sep 30 21:11:15 crc kubenswrapper[4756]: I0930 21:11:15.038092 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-7b75-account-create-xgm9w"] Sep 30 21:11:15 crc kubenswrapper[4756]: I0930 21:11:15.127198 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c44632-7e0e-4302-8261-83270bb41439" path="/var/lib/kubelet/pods/46c44632-7e0e-4302-8261-83270bb41439/volumes" Sep 30 21:11:17 crc kubenswrapper[4756]: I0930 21:11:17.332097 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:11:17 crc kubenswrapper[4756]: I0930 21:11:17.332522 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:11:18 crc kubenswrapper[4756]: I0930 21:11:18.016278 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-756d7594c9-2dtwc" Sep 30 21:11:18 crc kubenswrapper[4756]: I0930 21:11:18.016336 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-756d7594c9-2dtwc" Sep 30 21:11:19 crc kubenswrapper[4756]: I0930 21:11:19.117847 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:11:20 crc kubenswrapper[4756]: I0930 21:11:20.979046 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" 
event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc"} Sep 30 21:11:23 crc kubenswrapper[4756]: I0930 21:11:23.047571 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-2rw4m"] Sep 30 21:11:23 crc kubenswrapper[4756]: I0930 21:11:23.060797 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-2rw4m"] Sep 30 21:11:23 crc kubenswrapper[4756]: I0930 21:11:23.133852 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c" path="/var/lib/kubelet/pods/14a010fd-e7ad-43dc-bc3e-a19cfcfe0a9c/volumes" Sep 30 21:11:25 crc kubenswrapper[4756]: I0930 21:11:25.995860 4756 scope.go:117] "RemoveContainer" containerID="85b0b4211214b1b0940d27fac90ef0fc7dadfc4c7afc5981e944a56f3e595727" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.044777 4756 scope.go:117] "RemoveContainer" containerID="ab6dba53d51e0daddd120cde2c04642861f4699de3c2cac2b934fa7c65623f3c" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.080624 4756 scope.go:117] "RemoveContainer" containerID="a62eb74ea6ea7fb5bf1917c31450c012f9a6da3fab7321edb9ee09e478e98a13" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.152088 4756 scope.go:117] "RemoveContainer" containerID="4ae49e1404ac093ad727eb55bcadf7520d4b489934cc4899c2ab03d72ed55621" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.194549 4756 scope.go:117] "RemoveContainer" containerID="30e5fe0a52e1838b034a83f46a24e63f375c89cd78121be89f384bc094ce4abc" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.220118 4756 scope.go:117] "RemoveContainer" containerID="1ed06ca741613b448b386d339a6050885134868e51bebd7fb347b43ca5e6c977" Sep 30 21:11:26 crc kubenswrapper[4756]: I0930 21:11:26.288695 4756 scope.go:117] "RemoveContainer" containerID="4dd265c409a1699d3e5c079e15bfecc5bf99abec5907bc1455452b68cd9b7679" Sep 30 21:11:27 crc kubenswrapper[4756]: I0930 21:11:27.333521 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Sep 30 21:11:27 crc kubenswrapper[4756]: I0930 21:11:27.812038 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-log" probeResult="failure" output="Get \"http://10.217.1.47:9292/healthcheck\": dial tcp 10.217.1.47:9292: i/o timeout (Client.Timeout exceeded while awaiting headers)" Sep 30 21:11:27 crc kubenswrapper[4756]: I0930 21:11:27.812059 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="45b49577-8ba6-43a8-86be-987744002d0c" containerName="glance-httpd" probeResult="failure" output="Get \"http://10.217.1.47:9292/healthcheck\": dial tcp 10.217.1.47:9292: i/o timeout (Client.Timeout exceeded while awaiting headers)" Sep 30 21:11:28 crc kubenswrapper[4756]: I0930 21:11:28.018227 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused" 
Sep 30 21:11:39 crc kubenswrapper[4756]: I0930 21:11:39.197086 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:11:39 crc kubenswrapper[4756]: I0930 21:11:39.844053 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-756d7594c9-2dtwc" Sep 30 21:11:40 crc kubenswrapper[4756]: I0930 21:11:40.930779 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:11:41 crc kubenswrapper[4756]: I0930 21:11:41.593015 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-756d7594c9-2dtwc" Sep 30 21:11:41 crc kubenswrapper[4756]: I0930 21:11:41.649586 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-658f995c57-xqjhp"] Sep 30 21:11:41 crc kubenswrapper[4756]: I0930 21:11:41.649836 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon-log" containerID="cri-o://02c1b1d1502b8ba9bd7558d92dac4acf88306a6e9fc91c9690dd4d238eb88215" gracePeriod=30 Sep 30 21:11:41 crc kubenswrapper[4756]: I0930 21:11:41.649946 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" containerID="cri-o://4cbfc1b6152514ccc4d615a578b90468283a6e9192fd72681a6ee5847ca620b0" gracePeriod=30 Sep 30 21:11:45 crc kubenswrapper[4756]: I0930 21:11:45.288528 4756 generic.go:334] "Generic (PLEG): container finished" podID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerID="4cbfc1b6152514ccc4d615a578b90468283a6e9192fd72681a6ee5847ca620b0" exitCode=0 Sep 30 21:11:45 crc kubenswrapper[4756]: I0930 21:11:45.288569 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerDied","Data":"4cbfc1b6152514ccc4d615a578b90468283a6e9192fd72681a6ee5847ca620b0"} Sep 30 21:11:47 crc kubenswrapper[4756]: I0930 21:11:47.332879 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.570479 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-bcc99cb89-l9p64"] Sep 30 21:11:49 crc kubenswrapper[4756]: E0930 21:11:49.571153 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="registry-server" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.571168 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="registry-server" Sep 30 21:11:49 crc kubenswrapper[4756]: E0930 21:11:49.571196 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="extract-utilities" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.571203 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="extract-utilities" Sep 30 21:11:49 crc kubenswrapper[4756]: E0930 21:11:49.571222 4756 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="extract-content" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.571229 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="extract-content" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.571467 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec76931d-297e-4e71-9d01-f6acc920b97a" containerName="registry-server" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.573011 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.594588 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bcc99cb89-l9p64"] Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.730079 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53c3034d-5847-407a-9003-3526fee71366-horizon-secret-key\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.730493 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-scripts\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.730520 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53c3034d-5847-407a-9003-3526fee71366-logs\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.730582 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5xkp\" (UniqueName: \"kubernetes.io/projected/53c3034d-5847-407a-9003-3526fee71366-kube-api-access-w5xkp\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.730607 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-config-data\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.832038 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53c3034d-5847-407a-9003-3526fee71366-horizon-secret-key\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.832131 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-scripts\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " 
pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.832159 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53c3034d-5847-407a-9003-3526fee71366-logs\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.832223 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5xkp\" (UniqueName: \"kubernetes.io/projected/53c3034d-5847-407a-9003-3526fee71366-kube-api-access-w5xkp\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.832249 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-config-data\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.833673 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-config-data\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.833971 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53c3034d-5847-407a-9003-3526fee71366-logs\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.834679 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53c3034d-5847-407a-9003-3526fee71366-scripts\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.837587 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53c3034d-5847-407a-9003-3526fee71366-horizon-secret-key\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.860435 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5xkp\" (UniqueName: \"kubernetes.io/projected/53c3034d-5847-407a-9003-3526fee71366-kube-api-access-w5xkp\") pod \"horizon-bcc99cb89-l9p64\" (UID: \"53c3034d-5847-407a-9003-3526fee71366\") " pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:49 crc kubenswrapper[4756]: I0930 21:11:49.898683 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.399930 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bcc99cb89-l9p64"] Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.750001 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-hdf69"] Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.752050 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-hdf69" Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.759386 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-hdf69"] Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.851784 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd95t\" (UniqueName: \"kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t\") pod \"heat-db-create-hdf69\" (UID: \"d5acee4c-8040-4b34-ae2c-e81a40d3c0da\") " pod="openstack/heat-db-create-hdf69" Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.953289 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd95t\" (UniqueName: \"kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t\") pod \"heat-db-create-hdf69\" (UID: \"d5acee4c-8040-4b34-ae2c-e81a40d3c0da\") " pod="openstack/heat-db-create-hdf69" Sep 30 21:11:50 crc kubenswrapper[4756]: I0930 21:11:50.980269 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd95t\" (UniqueName: \"kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t\") pod \"heat-db-create-hdf69\" (UID: \"d5acee4c-8040-4b34-ae2c-e81a40d3c0da\") " pod="openstack/heat-db-create-hdf69" Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.070962 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-hdf69" Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.394613 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bcc99cb89-l9p64" event={"ID":"53c3034d-5847-407a-9003-3526fee71366","Type":"ContainerStarted","Data":"cc894d381c94d0f59dc0de455dc520e981f4553cf4703c302227c9cbd4fa03f2"} Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.394937 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bcc99cb89-l9p64" event={"ID":"53c3034d-5847-407a-9003-3526fee71366","Type":"ContainerStarted","Data":"4a2d3a46b88a2a656ee9ec912d08533251bd997c88a4d5d02c48d54b6dd11686"} Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.394950 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bcc99cb89-l9p64" event={"ID":"53c3034d-5847-407a-9003-3526fee71366","Type":"ContainerStarted","Data":"a879f971e3ef1845ea39a2720016de9f0eb05d263d6c73e421503d5f6f4846cb"} Sep 30 21:11:51 crc kubenswrapper[4756]: W0930 21:11:51.558238 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5acee4c_8040_4b34_ae2c_e81a40d3c0da.slice/crio-d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075 WatchSource:0}: Error finding container d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075: Status 404 returned error can't find the container with id d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075 Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.559042 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-bcc99cb89-l9p64" podStartSLOduration=2.559021998 podStartE2EDuration="2.559021998s" podCreationTimestamp="2025-09-30 21:11:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:11:51.430908197 +0000 UTC m=+6041.051841674" watchObservedRunningTime="2025-09-30 21:11:51.559021998 +0000 UTC m=+6041.179955475" Sep 30 21:11:51 crc kubenswrapper[4756]: I0930 21:11:51.559720 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-hdf69"] Sep 30 21:11:52 crc kubenswrapper[4756]: I0930 21:11:52.403466 4756 generic.go:334] "Generic (PLEG): container finished" podID="d5acee4c-8040-4b34-ae2c-e81a40d3c0da" containerID="eb4d3d4bc885a9df1108c5e3a04ccf0e5372ba7e031cc4508c38ea0de4b8ae7a" exitCode=0 Sep 30 21:11:52 crc kubenswrapper[4756]: I0930 21:11:52.403593 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-hdf69" event={"ID":"d5acee4c-8040-4b34-ae2c-e81a40d3c0da","Type":"ContainerDied","Data":"eb4d3d4bc885a9df1108c5e3a04ccf0e5372ba7e031cc4508c38ea0de4b8ae7a"} Sep 30 21:11:52 crc kubenswrapper[4756]: I0930 21:11:52.403856 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-hdf69" event={"ID":"d5acee4c-8040-4b34-ae2c-e81a40d3c0da","Type":"ContainerStarted","Data":"d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075"} Sep 30 21:11:53 crc kubenswrapper[4756]: I0930 21:11:53.786755 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-hdf69" Sep 30 21:11:53 crc kubenswrapper[4756]: I0930 21:11:53.834074 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd95t\" (UniqueName: \"kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t\") pod \"d5acee4c-8040-4b34-ae2c-e81a40d3c0da\" (UID: \"d5acee4c-8040-4b34-ae2c-e81a40d3c0da\") " Sep 30 21:11:53 crc kubenswrapper[4756]: I0930 21:11:53.840763 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t" (OuterVolumeSpecName: "kube-api-access-zd95t") pod "d5acee4c-8040-4b34-ae2c-e81a40d3c0da" (UID: "d5acee4c-8040-4b34-ae2c-e81a40d3c0da"). InnerVolumeSpecName "kube-api-access-zd95t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:11:53 crc kubenswrapper[4756]: I0930 21:11:53.936327 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd95t\" (UniqueName: \"kubernetes.io/projected/d5acee4c-8040-4b34-ae2c-e81a40d3c0da-kube-api-access-zd95t\") on node \"crc\" DevicePath \"\"" Sep 30 21:11:54 crc kubenswrapper[4756]: I0930 21:11:54.422335 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-hdf69" event={"ID":"d5acee4c-8040-4b34-ae2c-e81a40d3c0da","Type":"ContainerDied","Data":"d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075"} Sep 30 21:11:54 crc kubenswrapper[4756]: I0930 21:11:54.422715 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d617e388560b588a9a52b53b4f825ea78af129359fa323900c38fa41bb100075" Sep 30 21:11:54 crc kubenswrapper[4756]: I0930 21:11:54.422530 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-hdf69" Sep 30 21:11:55 crc kubenswrapper[4756]: I0930 21:11:55.046440 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-cj4b4"] Sep 30 21:11:55 crc kubenswrapper[4756]: I0930 21:11:55.057560 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-cj4b4"] Sep 30 21:11:55 crc kubenswrapper[4756]: I0930 21:11:55.132312 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6451acc6-4e73-4d50-b309-adb65a297d63" path="/var/lib/kubelet/pods/6451acc6-4e73-4d50-b309-adb65a297d63/volumes" Sep 30 21:11:57 crc kubenswrapper[4756]: I0930 21:11:57.333239 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Sep 30 21:11:59 crc kubenswrapper[4756]: I0930 21:11:59.900200 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:11:59 crc kubenswrapper[4756]: I0930 21:11:59.900863 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.830476 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-1e35-account-create-fr9l6"] Sep 30 21:12:00 crc kubenswrapper[4756]: E0930 21:12:00.830924 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5acee4c-8040-4b34-ae2c-e81a40d3c0da" containerName="mariadb-database-create" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.830945 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5acee4c-8040-4b34-ae2c-e81a40d3c0da" containerName="mariadb-database-create" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.831207 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5acee4c-8040-4b34-ae2c-e81a40d3c0da" containerName="mariadb-database-create" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.832000 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.834303 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.852368 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-1e35-account-create-fr9l6"] Sep 30 21:12:00 crc kubenswrapper[4756]: I0930 21:12:00.988007 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87hj2\" (UniqueName: \"kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2\") pod \"heat-1e35-account-create-fr9l6\" (UID: \"bb9200a1-8617-478b-a82c-c91af7397ea0\") " pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:01 crc kubenswrapper[4756]: I0930 21:12:01.090881 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87hj2\" (UniqueName: \"kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2\") pod \"heat-1e35-account-create-fr9l6\" (UID: \"bb9200a1-8617-478b-a82c-c91af7397ea0\") " pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:01 crc kubenswrapper[4756]: I0930 21:12:01.117130 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87hj2\" (UniqueName: \"kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2\") pod \"heat-1e35-account-create-fr9l6\" (UID: \"bb9200a1-8617-478b-a82c-c91af7397ea0\") " pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:01 crc kubenswrapper[4756]: I0930 21:12:01.170942 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:01 crc kubenswrapper[4756]: I0930 21:12:01.496321 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-1e35-account-create-fr9l6"] Sep 30 21:12:01 crc kubenswrapper[4756]: E0930 21:12:01.953567 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb9200a1_8617_478b_a82c_c91af7397ea0.slice/crio-conmon-f816c20cae3fb7173c5c2afd3b569ba21ba73211130fb7f47fbbb1d28c8167d2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb9200a1_8617_478b_a82c_c91af7397ea0.slice/crio-f816c20cae3fb7173c5c2afd3b569ba21ba73211130fb7f47fbbb1d28c8167d2.scope\": RecentStats: unable to find data in memory cache]" Sep 30 21:12:02 crc kubenswrapper[4756]: I0930 21:12:02.508638 4756 generic.go:334] "Generic (PLEG): container finished" podID="bb9200a1-8617-478b-a82c-c91af7397ea0" containerID="f816c20cae3fb7173c5c2afd3b569ba21ba73211130fb7f47fbbb1d28c8167d2" exitCode=0 Sep 30 21:12:02 crc kubenswrapper[4756]: I0930 21:12:02.508680 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-1e35-account-create-fr9l6" event={"ID":"bb9200a1-8617-478b-a82c-c91af7397ea0","Type":"ContainerDied","Data":"f816c20cae3fb7173c5c2afd3b569ba21ba73211130fb7f47fbbb1d28c8167d2"} Sep 30 21:12:02 crc kubenswrapper[4756]: I0930 21:12:02.508976 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-1e35-account-create-fr9l6" event={"ID":"bb9200a1-8617-478b-a82c-c91af7397ea0","Type":"ContainerStarted","Data":"9083938442b45f33c025ff61c645d581a9134460ef06bb6b0019a5bdf04da277"} Sep 30 21:12:03 crc 
kubenswrapper[4756]: I0930 21:12:03.910073 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.040888 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5e82-account-create-rphpt"] Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.049106 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5e82-account-create-rphpt"] Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.060308 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87hj2\" (UniqueName: \"kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2\") pod \"bb9200a1-8617-478b-a82c-c91af7397ea0\" (UID: \"bb9200a1-8617-478b-a82c-c91af7397ea0\") " Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.080383 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2" (OuterVolumeSpecName: "kube-api-access-87hj2") pod "bb9200a1-8617-478b-a82c-c91af7397ea0" (UID: "bb9200a1-8617-478b-a82c-c91af7397ea0"). InnerVolumeSpecName "kube-api-access-87hj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.165996 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87hj2\" (UniqueName: \"kubernetes.io/projected/bb9200a1-8617-478b-a82c-c91af7397ea0-kube-api-access-87hj2\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.531428 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-1e35-account-create-fr9l6" event={"ID":"bb9200a1-8617-478b-a82c-c91af7397ea0","Type":"ContainerDied","Data":"9083938442b45f33c025ff61c645d581a9134460ef06bb6b0019a5bdf04da277"} Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.531476 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9083938442b45f33c025ff61c645d581a9134460ef06bb6b0019a5bdf04da277" Sep 30 21:12:04 crc kubenswrapper[4756]: I0930 21:12:04.531508 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-1e35-account-create-fr9l6" Sep 30 21:12:05 crc kubenswrapper[4756]: I0930 21:12:05.130625 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85a3f0b6-c541-4d6f-8d22-83b1e99e4713" path="/var/lib/kubelet/pods/85a3f0b6-c541-4d6f-8d22-83b1e99e4713/volumes" Sep 30 21:12:05 crc kubenswrapper[4756]: I0930 21:12:05.992177 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-t9cp4"] Sep 30 21:12:05 crc kubenswrapper[4756]: E0930 21:12:05.993114 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb9200a1-8617-478b-a82c-c91af7397ea0" containerName="mariadb-account-create" Sep 30 21:12:05 crc kubenswrapper[4756]: I0930 21:12:05.993156 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb9200a1-8617-478b-a82c-c91af7397ea0" containerName="mariadb-account-create" Sep 30 21:12:05 crc kubenswrapper[4756]: I0930 21:12:05.993932 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb9200a1-8617-478b-a82c-c91af7397ea0" containerName="mariadb-account-create" Sep 30 21:12:05 crc kubenswrapper[4756]: I0930 21:12:05.995218 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.000141 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-t7g82" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.000211 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.010999 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-t9cp4"] Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.111210 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.111266 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jntjn\" (UniqueName: \"kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.111355 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.212967 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.213048 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jntjn\" (UniqueName: \"kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.213124 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.221541 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.222069 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" 
Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.228336 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jntjn\" (UniqueName: \"kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn\") pod \"heat-db-sync-t9cp4\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.322934 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:06 crc kubenswrapper[4756]: I0930 21:12:06.790912 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-t9cp4"] Sep 30 21:12:07 crc kubenswrapper[4756]: I0930 21:12:07.333136 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-658f995c57-xqjhp" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Sep 30 21:12:07 crc kubenswrapper[4756]: I0930 21:12:07.333332 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:12:07 crc kubenswrapper[4756]: I0930 21:12:07.570938 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t9cp4" event={"ID":"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d","Type":"ContainerStarted","Data":"ae0fccd6875276a13b807ddd1bafdb95100825d7d82fdb7f9c2e9d156775376e"} Sep 30 21:12:10 crc kubenswrapper[4756]: I0930 21:12:10.037652 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-8rdqr"] Sep 30 21:12:10 crc kubenswrapper[4756]: I0930 21:12:10.048609 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-8rdqr"] Sep 30 21:12:11 crc kubenswrapper[4756]: I0930 21:12:11.130249 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bdf287b-f9d9-44e4-926c-958054dd0ca7" path="/var/lib/kubelet/pods/9bdf287b-f9d9-44e4-926c-958054dd0ca7/volumes" Sep 30 21:12:12 crc kubenswrapper[4756]: I0930 21:12:12.635707 4756 generic.go:334] "Generic (PLEG): container finished" podID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerID="02c1b1d1502b8ba9bd7558d92dac4acf88306a6e9fc91c9690dd4d238eb88215" exitCode=137 Sep 30 21:12:12 crc kubenswrapper[4756]: I0930 21:12:12.636091 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerDied","Data":"02c1b1d1502b8ba9bd7558d92dac4acf88306a6e9fc91c9690dd4d238eb88215"} Sep 30 21:12:13 crc kubenswrapper[4756]: I0930 21:12:13.611935 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:12:15 crc kubenswrapper[4756]: I0930 21:12:15.852014 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-bcc99cb89-l9p64" Sep 30 21:12:15 crc kubenswrapper[4756]: I0930 21:12:15.958262 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"] Sep 30 21:12:15 crc kubenswrapper[4756]: I0930 21:12:15.958575 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon-log" 
containerID="cri-o://dc88469e8e7314e8896805fba7912f59b27b211f5411372a9fc6da3a5f53be50" gracePeriod=30 Sep 30 21:12:15 crc kubenswrapper[4756]: I0930 21:12:15.958732 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon" containerID="cri-o://2aae91d6f3588824470b4dd5d045c732e82503a683861c4f01ed7fc3db7aae02" gracePeriod=30 Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.037223 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.162339 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts\") pod \"d456def0-4660-4a2b-ac99-c664da4d2d5d\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.162523 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs\") pod \"d456def0-4660-4a2b-ac99-c664da4d2d5d\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.162579 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data\") pod \"d456def0-4660-4a2b-ac99-c664da4d2d5d\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.162623 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key\") pod \"d456def0-4660-4a2b-ac99-c664da4d2d5d\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.162925 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm4b6\" (UniqueName: \"kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6\") pod \"d456def0-4660-4a2b-ac99-c664da4d2d5d\" (UID: \"d456def0-4660-4a2b-ac99-c664da4d2d5d\") " Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.163656 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs" (OuterVolumeSpecName: "logs") pod "d456def0-4660-4a2b-ac99-c664da4d2d5d" (UID: "d456def0-4660-4a2b-ac99-c664da4d2d5d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.163850 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d456def0-4660-4a2b-ac99-c664da4d2d5d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.166297 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d456def0-4660-4a2b-ac99-c664da4d2d5d" (UID: "d456def0-4660-4a2b-ac99-c664da4d2d5d"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.166918 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6" (OuterVolumeSpecName: "kube-api-access-wm4b6") pod "d456def0-4660-4a2b-ac99-c664da4d2d5d" (UID: "d456def0-4660-4a2b-ac99-c664da4d2d5d"). InnerVolumeSpecName "kube-api-access-wm4b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.187745 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data" (OuterVolumeSpecName: "config-data") pod "d456def0-4660-4a2b-ac99-c664da4d2d5d" (UID: "d456def0-4660-4a2b-ac99-c664da4d2d5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.189825 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts" (OuterVolumeSpecName: "scripts") pod "d456def0-4660-4a2b-ac99-c664da4d2d5d" (UID: "d456def0-4660-4a2b-ac99-c664da4d2d5d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.265647 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm4b6\" (UniqueName: \"kubernetes.io/projected/d456def0-4660-4a2b-ac99-c664da4d2d5d-kube-api-access-wm4b6\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.265689 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.265701 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d456def0-4660-4a2b-ac99-c664da4d2d5d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.265715 4756 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d456def0-4660-4a2b-ac99-c664da4d2d5d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.713285 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t9cp4" event={"ID":"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d","Type":"ContainerStarted","Data":"97abc75d3577dc1b769c05ea6591436fbc9c0bff6e16c7ddf58f0097efa9812f"} Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.718249 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-658f995c57-xqjhp" event={"ID":"d456def0-4660-4a2b-ac99-c664da4d2d5d","Type":"ContainerDied","Data":"705be0fef9cc69209b8c112bb236515c8e83058280fcd30e131d37d4375fe010"} Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.718306 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-658f995c57-xqjhp" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.718311 4756 scope.go:117] "RemoveContainer" containerID="4cbfc1b6152514ccc4d615a578b90468283a6e9192fd72681a6ee5847ca620b0" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.742362 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-t9cp4" podStartSLOduration=2.7704940799999997 podStartE2EDuration="12.742337207s" podCreationTimestamp="2025-09-30 21:12:05 +0000 UTC" firstStartedPulling="2025-09-30 21:12:06.808207801 +0000 UTC m=+6056.429141278" lastFinishedPulling="2025-09-30 21:12:16.780050928 +0000 UTC m=+6066.400984405" observedRunningTime="2025-09-30 21:12:17.733061465 +0000 UTC m=+6067.353994962" watchObservedRunningTime="2025-09-30 21:12:17.742337207 +0000 UTC m=+6067.363270694" Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.798990 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-658f995c57-xqjhp"] Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.806542 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-658f995c57-xqjhp"] Sep 30 21:12:17 crc kubenswrapper[4756]: I0930 21:12:17.939520 4756 scope.go:117] "RemoveContainer" containerID="02c1b1d1502b8ba9bd7558d92dac4acf88306a6e9fc91c9690dd4d238eb88215" Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.101764 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:46174->10.217.1.116:8080: read: connection reset by peer" Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.126486 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" path="/var/lib/kubelet/pods/d456def0-4660-4a2b-ac99-c664da4d2d5d/volumes" Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.744573 4756 generic.go:334] "Generic (PLEG): container finished" podID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerID="2aae91d6f3588824470b4dd5d045c732e82503a683861c4f01ed7fc3db7aae02" exitCode=0 Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.744658 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerDied","Data":"2aae91d6f3588824470b4dd5d045c732e82503a683861c4f01ed7fc3db7aae02"} Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.746312 4756 generic.go:334] "Generic (PLEG): container finished" podID="fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" containerID="97abc75d3577dc1b769c05ea6591436fbc9c0bff6e16c7ddf58f0097efa9812f" exitCode=0 Sep 30 21:12:19 crc kubenswrapper[4756]: I0930 21:12:19.746359 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t9cp4" event={"ID":"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d","Type":"ContainerDied","Data":"97abc75d3577dc1b769c05ea6591436fbc9c0bff6e16c7ddf58f0097efa9812f"} Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.174446 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.342787 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data\") pod \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.342890 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle\") pod \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.342987 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jntjn\" (UniqueName: \"kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn\") pod \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\" (UID: \"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d\") " Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.350114 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn" (OuterVolumeSpecName: "kube-api-access-jntjn") pod "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" (UID: "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d"). InnerVolumeSpecName "kube-api-access-jntjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.378368 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" (UID: "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.418524 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data" (OuterVolumeSpecName: "config-data") pod "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" (UID: "fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.445365 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.445432 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.445446 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jntjn\" (UniqueName: \"kubernetes.io/projected/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d-kube-api-access-jntjn\") on node \"crc\" DevicePath \"\"" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.779379 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t9cp4" event={"ID":"fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d","Type":"ContainerDied","Data":"ae0fccd6875276a13b807ddd1bafdb95100825d7d82fdb7f9c2e9d156775376e"} Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.779479 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae0fccd6875276a13b807ddd1bafdb95100825d7d82fdb7f9c2e9d156775376e" Sep 30 21:12:21 crc kubenswrapper[4756]: I0930 21:12:21.779547 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-t9cp4" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.150285 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-67b79b8959-pv8z8"] Sep 30 21:12:23 crc kubenswrapper[4756]: E0930 21:12:23.151054 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151074 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" Sep 30 21:12:23 crc kubenswrapper[4756]: E0930 21:12:23.151101 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon-log" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151110 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon-log" Sep 30 21:12:23 crc kubenswrapper[4756]: E0930 21:12:23.151157 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" containerName="heat-db-sync" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151164 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" containerName="heat-db-sync" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151427 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon-log" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151469 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" containerName="heat-db-sync" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.151486 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="d456def0-4660-4a2b-ac99-c664da4d2d5d" containerName="horizon" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.152366 4756 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.154508 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.154900 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-t7g82" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.157373 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.179445 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-67b79b8959-pv8z8"] Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.234602 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-c557b4776-tw52r"] Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.236353 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.244138 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.246652 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c557b4776-tw52r"] Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.281636 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data-custom\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.281855 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.282013 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlfvn\" (UniqueName: \"kubernetes.io/projected/a8308f94-a593-4155-ba5b-c734ca1e7911-kube-api-access-zlfvn\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.282236 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-combined-ca-bundle\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.283319 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-5f6d774f5b-qwz4z"] Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.285449 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.291884 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.301066 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5f6d774f5b-qwz4z"] Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384334 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384379 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data-custom\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384466 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384508 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data-custom\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384565 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlfvn\" (UniqueName: \"kubernetes.io/projected/a8308f94-a593-4155-ba5b-c734ca1e7911-kube-api-access-zlfvn\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384583 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4xnv\" (UniqueName: \"kubernetes.io/projected/5e2144fa-2a37-4e9c-882b-0cbc001e0206-kube-api-access-v4xnv\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384599 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx2lg\" (UniqueName: \"kubernetes.io/projected/9755231c-4c74-409a-8d6a-626b63c508a2-kube-api-access-cx2lg\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384634 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: 
\"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384665 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-combined-ca-bundle\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384686 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data-custom\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384704 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-combined-ca-bundle\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.384771 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-combined-ca-bundle\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.391872 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data-custom\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.392056 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-combined-ca-bundle\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.393158 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8308f94-a593-4155-ba5b-c734ca1e7911-config-data\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.412219 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlfvn\" (UniqueName: \"kubernetes.io/projected/a8308f94-a593-4155-ba5b-c734ca1e7911-kube-api-access-zlfvn\") pod \"heat-engine-67b79b8959-pv8z8\" (UID: \"a8308f94-a593-4155-ba5b-c734ca1e7911\") " pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.483751 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.487282 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4xnv\" (UniqueName: \"kubernetes.io/projected/5e2144fa-2a37-4e9c-882b-0cbc001e0206-kube-api-access-v4xnv\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.487446 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx2lg\" (UniqueName: \"kubernetes.io/projected/9755231c-4c74-409a-8d6a-626b63c508a2-kube-api-access-cx2lg\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.487583 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.487741 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-combined-ca-bundle\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.487895 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data-custom\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.488127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-combined-ca-bundle\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.488295 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.488543 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data-custom\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.493193 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-combined-ca-bundle\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " 
pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.493422 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-combined-ca-bundle\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.493492 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data-custom\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.493861 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data-custom\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.496625 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2144fa-2a37-4e9c-882b-0cbc001e0206-config-data\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.505050 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9755231c-4c74-409a-8d6a-626b63c508a2-config-data\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.508371 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4xnv\" (UniqueName: \"kubernetes.io/projected/5e2144fa-2a37-4e9c-882b-0cbc001e0206-kube-api-access-v4xnv\") pod \"heat-api-c557b4776-tw52r\" (UID: \"5e2144fa-2a37-4e9c-882b-0cbc001e0206\") " pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.508929 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx2lg\" (UniqueName: \"kubernetes.io/projected/9755231c-4c74-409a-8d6a-626b63c508a2-kube-api-access-cx2lg\") pod \"heat-cfnapi-5f6d774f5b-qwz4z\" (UID: \"9755231c-4c74-409a-8d6a-626b63c508a2\") " pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.573191 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.617794 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:23 crc kubenswrapper[4756]: I0930 21:12:23.982334 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-67b79b8959-pv8z8"] Sep 30 21:12:23 crc kubenswrapper[4756]: W0930 21:12:23.986942 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8308f94_a593_4155_ba5b_c734ca1e7911.slice/crio-26146548b1cd2c205ff8a4270a408d4e24fb1cf7655c566d52dbcc89b6ae0c56 WatchSource:0}: Error finding container 26146548b1cd2c205ff8a4270a408d4e24fb1cf7655c566d52dbcc89b6ae0c56: Status 404 returned error can't find the container with id 26146548b1cd2c205ff8a4270a408d4e24fb1cf7655c566d52dbcc89b6ae0c56 Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.120040 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c557b4776-tw52r"] Sep 30 21:12:24 crc kubenswrapper[4756]: W0930 21:12:24.123942 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e2144fa_2a37_4e9c_882b_0cbc001e0206.slice/crio-c9f5b35202dfd290739036d8a075d71e03530bdceb1e8322488dfbc5765b12b6 WatchSource:0}: Error finding container c9f5b35202dfd290739036d8a075d71e03530bdceb1e8322488dfbc5765b12b6: Status 404 returned error can't find the container with id c9f5b35202dfd290739036d8a075d71e03530bdceb1e8322488dfbc5765b12b6 Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.179340 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5f6d774f5b-qwz4z"] Sep 30 21:12:24 crc kubenswrapper[4756]: W0930 21:12:24.181503 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9755231c_4c74_409a_8d6a_626b63c508a2.slice/crio-6c851b7662a0f03442d2b567b1bd5ce8337301b6a868bb6cd7ca9824a4b90247 WatchSource:0}: Error finding container 6c851b7662a0f03442d2b567b1bd5ce8337301b6a868bb6cd7ca9824a4b90247: Status 404 returned error can't find the container with id 6c851b7662a0f03442d2b567b1bd5ce8337301b6a868bb6cd7ca9824a4b90247 Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.812007 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c557b4776-tw52r" event={"ID":"5e2144fa-2a37-4e9c-882b-0cbc001e0206","Type":"ContainerStarted","Data":"c9f5b35202dfd290739036d8a075d71e03530bdceb1e8322488dfbc5765b12b6"} Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.815463 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-67b79b8959-pv8z8" event={"ID":"a8308f94-a593-4155-ba5b-c734ca1e7911","Type":"ContainerStarted","Data":"ab1d8d647373884a530da0f72d5011aaa64a84746f41d643bd523510e4fc04e4"} Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.815518 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-67b79b8959-pv8z8" event={"ID":"a8308f94-a593-4155-ba5b-c734ca1e7911","Type":"ContainerStarted","Data":"26146548b1cd2c205ff8a4270a408d4e24fb1cf7655c566d52dbcc89b6ae0c56"} Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.815563 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-67b79b8959-pv8z8" Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.818594 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" 
event={"ID":"9755231c-4c74-409a-8d6a-626b63c508a2","Type":"ContainerStarted","Data":"6c851b7662a0f03442d2b567b1bd5ce8337301b6a868bb6cd7ca9824a4b90247"} Sep 30 21:12:24 crc kubenswrapper[4756]: I0930 21:12:24.833013 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-67b79b8959-pv8z8" podStartSLOduration=1.832997616 podStartE2EDuration="1.832997616s" podCreationTimestamp="2025-09-30 21:12:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:12:24.828916469 +0000 UTC m=+6074.449849956" watchObservedRunningTime="2025-09-30 21:12:24.832997616 +0000 UTC m=+6074.453931093" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.519556 4756 scope.go:117] "RemoveContainer" containerID="a7428519240363e8f4dbbb9d294845a32cf47fb9d46aa848158c3ed7999b53db" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.567354 4756 scope.go:117] "RemoveContainer" containerID="9a3649c9dbeee4aed8b0754aa266b9bf14510988fa809693258ef1a89aa92a61" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.604711 4756 scope.go:117] "RemoveContainer" containerID="dac1f145b7259b22522b30be300f16e0c63bba40c22562c75e9311a0e4aa4fb3" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.838563 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" event={"ID":"9755231c-4c74-409a-8d6a-626b63c508a2","Type":"ContainerStarted","Data":"a7e64dd614623588a90dd5e5d53ab8b64c90760b2de0eb9f3a26d55992feb463"} Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.840643 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c557b4776-tw52r" event={"ID":"5e2144fa-2a37-4e9c-882b-0cbc001e0206","Type":"ContainerStarted","Data":"5479a0acb7712e8f1e81c052887bcb136dd0aa6e3f13e76c5aaa87d2f1e43f98"} Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.840778 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-c557b4776-tw52r" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.877588 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" podStartSLOduration=1.9152801099999999 podStartE2EDuration="3.877565262s" podCreationTimestamp="2025-09-30 21:12:23 +0000 UTC" firstStartedPulling="2025-09-30 21:12:24.183206467 +0000 UTC m=+6073.804139944" lastFinishedPulling="2025-09-30 21:12:26.145491619 +0000 UTC m=+6075.766425096" observedRunningTime="2025-09-30 21:12:26.868038664 +0000 UTC m=+6076.488972141" watchObservedRunningTime="2025-09-30 21:12:26.877565262 +0000 UTC m=+6076.498498739" Sep 30 21:12:26 crc kubenswrapper[4756]: I0930 21:12:26.887366 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-c557b4776-tw52r" podStartSLOduration=1.8642842210000001 podStartE2EDuration="3.887344777s" podCreationTimestamp="2025-09-30 21:12:23 +0000 UTC" firstStartedPulling="2025-09-30 21:12:24.126128079 +0000 UTC m=+6073.747061556" lastFinishedPulling="2025-09-30 21:12:26.149188615 +0000 UTC m=+6075.770122112" observedRunningTime="2025-09-30 21:12:26.886155866 +0000 UTC m=+6076.507089343" watchObservedRunningTime="2025-09-30 21:12:26.887344777 +0000 UTC m=+6076.508278254" Sep 30 21:12:27 crc kubenswrapper[4756]: I0930 21:12:27.849520 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z" Sep 30 21:12:28 crc kubenswrapper[4756]: I0930 
Sep 30 21:12:28 crc kubenswrapper[4756]: I0930 21:12:28.017858 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused"
Sep 30 21:12:34 crc kubenswrapper[4756]: I0930 21:12:34.948518 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-c557b4776-tw52r"
Sep 30 21:12:34 crc kubenswrapper[4756]: I0930 21:12:34.975415 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-5f6d774f5b-qwz4z"
Sep 30 21:12:38 crc kubenswrapper[4756]: I0930 21:12:38.017074 4756 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-756d7594c9-2dtwc" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused"
Sep 30 21:12:38 crc kubenswrapper[4756]: I0930 21:12:38.017838 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:12:43 crc kubenswrapper[4756]: I0930 21:12:43.522458 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-67b79b8959-pv8z8"
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.044362 4756 generic.go:334] "Generic (PLEG): container finished" podID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerID="dc88469e8e7314e8896805fba7912f59b27b211f5411372a9fc6da3a5f53be50" exitCode=137
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.044448 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerDied","Data":"dc88469e8e7314e8896805fba7912f59b27b211f5411372a9fc6da3a5f53be50"}
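
The prober.go entries above show what a failing HTTP readiness probe looks like from the kubelet's side: a GET against the pod IP that must return a 2xx/3xx status, with "connection refused" counting as a failure like any other. A minimal stand-in for that check (plain net/http, not the kubelet's prober; the URL is the one from the log and is only reachable inside that cluster):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 10.217.1.116:8080: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil // probe passes; "SyncLoop (probe)" eventually flips to status="ready"
}

func main() {
	fmt.Println(probeOnce("http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/"))
}
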
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.570032 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.607895 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data\") pod \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") "
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.608265 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs\") pod \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") "
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.608318 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s85qt\" (UniqueName: \"kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt\") pod \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") "
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.608375 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts\") pod \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") "
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.608616 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key\") pod \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\" (UID: \"871a70bc-3a8f-4615-9e8f-cd949ec43e0d\") "
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.608852 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs" (OuterVolumeSpecName: "logs") pod "871a70bc-3a8f-4615-9e8f-cd949ec43e0d" (UID: "871a70bc-3a8f-4615-9e8f-cd949ec43e0d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.609274 4756 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-logs\") on node \"crc\" DevicePath \"\""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.627602 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt" (OuterVolumeSpecName: "kube-api-access-s85qt") pod "871a70bc-3a8f-4615-9e8f-cd949ec43e0d" (UID: "871a70bc-3a8f-4615-9e8f-cd949ec43e0d"). InnerVolumeSpecName "kube-api-access-s85qt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.629841 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "871a70bc-3a8f-4615-9e8f-cd949ec43e0d" (UID: "871a70bc-3a8f-4615-9e8f-cd949ec43e0d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.661241 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data" (OuterVolumeSpecName: "config-data") pod "871a70bc-3a8f-4615-9e8f-cd949ec43e0d" (UID: "871a70bc-3a8f-4615-9e8f-cd949ec43e0d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.677110 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts" (OuterVolumeSpecName: "scripts") pod "871a70bc-3a8f-4615-9e8f-cd949ec43e0d" (UID: "871a70bc-3a8f-4615-9e8f-cd949ec43e0d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.710951 4756 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.710989 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.710999 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s85qt\" (UniqueName: \"kubernetes.io/projected/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-kube-api-access-s85qt\") on node \"crc\" DevicePath \"\""
Sep 30 21:12:46 crc kubenswrapper[4756]: I0930 21:12:46.711011 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/871a70bc-3a8f-4615-9e8f-cd949ec43e0d-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.059004 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-756d7594c9-2dtwc" event={"ID":"871a70bc-3a8f-4615-9e8f-cd949ec43e0d","Type":"ContainerDied","Data":"95a756160987bfaf98b48e3b195b7338bf2af269c6b4a13f823ee49f9614260b"}
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.059058 4756 scope.go:117] "RemoveContainer" containerID="2aae91d6f3588824470b4dd5d045c732e82503a683861c4f01ed7fc3db7aae02"
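
The reconciler/operation_generator entries above follow a fixed pattern for a deleted pod: UnmountVolume is started for each volume, TearDown succeeds per volume, and each volume is then reported as detached from the node (in the log the detached reports can interleave with the teardowns). A rough sketch of that ordering with invented types (the real reconciler lives in kubelet's volume manager and does far more bookkeeping):

package main

import "fmt"

type volume struct{ name, plugin string }

func teardownPodVolumes(podUID string, vols []volume) {
	for _, v := range vols {
		fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.name, podUID)
	}
	for _, v := range vols {
		// TearDown unmounts/cleans the path under /var/lib/kubelet/pods/<uid>/volumes/...
		fmt.Printf("UnmountVolume.TearDown succeeded for volume %q (plugin %s)\n", v.name, v.plugin)
	}
	for _, v := range vols {
		fmt.Printf("Volume detached for volume %q on node \"crc\"\n", v.name)
	}
}

func main() {
	teardownPodVolumes("871a70bc-3a8f-4615-9e8f-cd949ec43e0d", []volume{
		{"logs", "kubernetes.io/empty-dir"},
		{"config-data", "kubernetes.io/configmap"},
		{"scripts", "kubernetes.io/configmap"},
		{"horizon-secret-key", "kubernetes.io/secret"},
		{"kube-api-access-s85qt", "kubernetes.io/projected"},
	})
}
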
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.059235 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-756d7594c9-2dtwc"
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.106078 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"]
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.113982 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-756d7594c9-2dtwc"]
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.131033 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" path="/var/lib/kubelet/pods/871a70bc-3a8f-4615-9e8f-cd949ec43e0d/volumes"
Sep 30 21:12:47 crc kubenswrapper[4756]: I0930 21:12:47.259784 4756 scope.go:117] "RemoveContainer" containerID="dc88469e8e7314e8896805fba7912f59b27b211f5411372a9fc6da3a5f53be50"
Sep 30 21:13:06 crc kubenswrapper[4756]: I0930 21:13:06.039091 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jsrcc"]
Sep 30 21:13:06 crc kubenswrapper[4756]: I0930 21:13:06.048137 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-jsrcc"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.029710 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-pg6bc"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.038782 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-st8q8"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.046241 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-pg6bc"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.054268 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-st8q8"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.131884 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e782094-bf7e-4670-ae47-42a92bb17066" path="/var/lib/kubelet/pods/4e782094-bf7e-4670-ae47-42a92bb17066/volumes"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.132783 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e05b932-7161-43ea-a0cc-0fb87c3e0518" path="/var/lib/kubelet/pods/8e05b932-7161-43ea-a0cc-0fb87c3e0518/volumes"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.134099 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eee9c0c3-6721-41ad-a547-cceea6c853b5" path="/var/lib/kubelet/pods/eee9c0c3-6721-41ad-a547-cceea6c853b5/volumes"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.252697 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"]
Sep 30 21:13:07 crc kubenswrapper[4756]: E0930 21:13:07.253185 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.253204 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon"
Sep 30 21:13:07 crc kubenswrapper[4756]: E0930 21:13:07.253216 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon-log"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.253225 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon-log"
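
The cpu_manager/state_mem lines above are housekeeping: once the horizon pod is gone, any per-container CPU (and, just below, memory) assignments recorded for it are stale and get dropped. A toy version of that sweep, assuming a flat map keyed by pod UID and container name (the real managers keep richer checkpointed state):

package main

import "fmt"

type key struct{ podUID, container string }

func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(assignments, k) // the "Deleted CPUSet assignment" step
		}
	}
}

func main() {
	assignments := map[key]string{
		{"871a70bc-3a8f-4615-9e8f-cd949ec43e0d", "horizon"}:     "0-3",
		{"871a70bc-3a8f-4615-9e8f-cd949ec43e0d", "horizon-log"}: "0-3",
	}
	removeStaleState(assignments, map[string]bool{}) // horizon's pod no longer exists
	fmt.Println(len(assignments), "assignments left") // 0
}
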
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.253479 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon-log"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.253515 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="871a70bc-3a8f-4615-9e8f-cd949ec43e0d" containerName="horizon"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.255145 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.257365 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.278012 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"]
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.349245 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.349342 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt8rt\" (UniqueName: \"kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.349415 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.451994 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.452215 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt8rt\" (UniqueName: \"kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.452383 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.452507 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.453141 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.471492 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt8rt\" (UniqueName: \"kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:07 crc kubenswrapper[4756]: I0930 21:13:07.574644 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:08 crc kubenswrapper[4756]: I0930 21:13:08.142173 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"]
Sep 30 21:13:08 crc kubenswrapper[4756]: I0930 21:13:08.257703 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq" event={"ID":"7f8e9cd9-d70a-43d4-829d-bcf006fe3916","Type":"ContainerStarted","Data":"52a81455db9e41150f9b96761c1cf7da310e5ddc9ae8eac2b7dce4aa0195b65e"}
Sep 30 21:13:09 crc kubenswrapper[4756]: I0930 21:13:09.272409 4756 generic.go:334] "Generic (PLEG): container finished" podID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerID="5cef9d58ae41bfb97499a504b75a81f4e34211dff1c4351b8bc903e4146323f9" exitCode=0
Sep 30 21:13:09 crc kubenswrapper[4756]: I0930 21:13:09.272460 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq" event={"ID":"7f8e9cd9-d70a-43d4-829d-bcf006fe3916","Type":"ContainerDied","Data":"5cef9d58ae41bfb97499a504b75a81f4e34211dff1c4351b8bc903e4146323f9"}
Sep 30 21:13:12 crc kubenswrapper[4756]: I0930 21:13:12.309859 4756 generic.go:334] "Generic (PLEG): container finished" podID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerID="4ce1d61f8789e94b4fa7bd5f1e3eb78b882df387e0353e3e675374ee5e6875fd" exitCode=0
Sep 30 21:13:12 crc kubenswrapper[4756]: I0930 21:13:12.309956 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq" event={"ID":"7f8e9cd9-d70a-43d4-829d-bcf006fe3916","Type":"ContainerDied","Data":"4ce1d61f8789e94b4fa7bd5f1e3eb78b882df387e0353e3e675374ee5e6875fd"}
Sep 30 21:13:13 crc kubenswrapper[4756]: I0930 21:13:13.320741 4756 generic.go:334] "Generic (PLEG): container finished" podID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerID="06986c39904bbea161266051814fc3a6058e54246d47c7c707252bbeb0d7219a" exitCode=0
Sep 30 21:13:13 crc kubenswrapper[4756]: I0930 21:13:13.320794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq" event={"ID":"7f8e9cd9-d70a-43d4-829d-bcf006fe3916","Type":"ContainerDied","Data":"06986c39904bbea161266051814fc3a6058e54246d47c7c707252bbeb0d7219a"}
Sep 30 21:13:14 crc kubenswrapper[4756]: I0930 21:13:14.931486 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.020562 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle\") pod \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") "
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.020634 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util\") pod \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") "
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.020709 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt8rt\" (UniqueName: \"kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt\") pod \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\" (UID: \"7f8e9cd9-d70a-43d4-829d-bcf006fe3916\") "
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.023574 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle" (OuterVolumeSpecName: "bundle") pod "7f8e9cd9-d70a-43d4-829d-bcf006fe3916" (UID: "7f8e9cd9-d70a-43d4-829d-bcf006fe3916"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.028615 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt" (OuterVolumeSpecName: "kube-api-access-mt8rt") pod "7f8e9cd9-d70a-43d4-829d-bcf006fe3916" (UID: "7f8e9cd9-d70a-43d4-829d-bcf006fe3916"). InnerVolumeSpecName "kube-api-access-mt8rt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.032518 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util" (OuterVolumeSpecName: "util") pod "7f8e9cd9-d70a-43d4-829d-bcf006fe3916" (UID: "7f8e9cd9-d70a-43d4-829d-bcf006fe3916"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.123177 4756 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-util\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.123205 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt8rt\" (UniqueName: \"kubernetes.io/projected/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-kube-api-access-mt8rt\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.123216 4756 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8e9cd9-d70a-43d4-829d-bcf006fe3916-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.368282 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq" event={"ID":"7f8e9cd9-d70a-43d4-829d-bcf006fe3916","Type":"ContainerDied","Data":"52a81455db9e41150f9b96761c1cf7da310e5ddc9ae8eac2b7dce4aa0195b65e"}
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.368744 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52a81455db9e41150f9b96761c1cf7da310e5ddc9ae8eac2b7dce4aa0195b65e"
Sep 30 21:13:15 crc kubenswrapper[4756]: I0930 21:13:15.368372 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq"
Sep 30 21:13:17 crc kubenswrapper[4756]: I0930 21:13:17.034736 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f75-account-create-66wxn"]
Sep 30 21:13:17 crc kubenswrapper[4756]: I0930 21:13:17.045289 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5f75-account-create-66wxn"]
Sep 30 21:13:17 crc kubenswrapper[4756]: I0930 21:13:17.135580 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ed2348c-6c17-4c81-a72f-eba6b032f566" path="/var/lib/kubelet/pods/6ed2348c-6c17-4c81-a72f-eba6b032f566/volumes"
Sep 30 21:13:18 crc kubenswrapper[4756]: I0930 21:13:18.052055 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-c790-account-create-p82f9"]
Sep 30 21:13:18 crc kubenswrapper[4756]: I0930 21:13:18.063810 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-c790-account-create-p82f9"]
Sep 30 21:13:18 crc kubenswrapper[4756]: I0930 21:13:18.075906 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-00ab-account-create-qzmg6"]
Sep 30 21:13:18 crc kubenswrapper[4756]: I0930 21:13:18.086312 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-00ab-account-create-qzmg6"]
Sep 30 21:13:19 crc kubenswrapper[4756]: I0930 21:13:19.158311 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61039609-1019-4e9c-9aeb-24d37151f9d6" path="/var/lib/kubelet/pods/61039609-1019-4e9c-9aeb-24d37151f9d6/volumes"
Sep 30 21:13:19 crc kubenswrapper[4756]: I0930 21:13:19.163986 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c67011a6-d3f0-4ce8-b8de-3372650df4a2" path="/var/lib/kubelet/pods/c67011a6-d3f0-4ce8-b8de-3372650df4a2/volumes"
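
The "Cleaned up orphaned pod volumes dir" lines above are the final step of pod removal: after the API object is gone and every volume has been unmounted, the per-pod directory under /var/lib/kubelet/pods/<podUID>/volumes is deleted. A hedged sketch of that check (hypothetical helper, not the kubelet's implementation; it refuses to clean a pod that is still active or still has volume subdirectories):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cleanupOrphanedPodDir(kubeletRoot, podUID string, active map[string]bool) error {
	if active[podUID] {
		return fmt.Errorf("pod %s still active, not cleaning", podUID)
	}
	dir := filepath.Join(kubeletRoot, "pods", podUID, "volumes")
	entries, err := os.ReadDir(dir)
	if err != nil {
		return err
	}
	if len(entries) > 0 {
		return fmt.Errorf("pod %s still has %d volume dirs, skipping", podUID, len(entries))
	}
	fmt.Printf("Cleaned up orphaned pod volumes dir path=%q\n", dir)
	return os.RemoveAll(dir)
}

func main() {
	root, _ := os.MkdirTemp("", "kubelet-demo")
	defer os.RemoveAll(root)
	podUID := "6ed2348c-6c17-4c81-a72f-eba6b032f566" // UID borrowed from the log
	_ = os.MkdirAll(filepath.Join(root, "pods", podUID, "volumes"), 0o755)
	fmt.Println(cleanupOrphanedPodDir(root, podUID, map[string]bool{}))
}
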
pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"] Sep 30 21:13:25 crc kubenswrapper[4756]: E0930 21:13:25.129263 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="extract" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.129276 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="extract" Sep 30 21:13:25 crc kubenswrapper[4756]: E0930 21:13:25.129299 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="pull" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.129305 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="pull" Sep 30 21:13:25 crc kubenswrapper[4756]: E0930 21:13:25.129326 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="util" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.129331 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="util" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.129540 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8e9cd9-d70a-43d4-829d-bcf006fe3916" containerName="extract" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.130266 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.147984 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.148532 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-tm9hj" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.158455 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"] Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.162623 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.264577 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg5vn\" (UniqueName: \"kubernetes.io/projected/6710b308-b5a5-4043-97c2-68b7816fac9a-kube-api-access-pg5vn\") pod \"obo-prometheus-operator-7c8cf85677-pkmns\" (UID: \"6710b308-b5a5-4043-97c2-68b7816fac9a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns" Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.313976 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"] Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.315635 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.315635 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.320564 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.320687 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-vl7x6"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.339698 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.341069 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.351960 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.366149 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg5vn\" (UniqueName: \"kubernetes.io/projected/6710b308-b5a5-4043-97c2-68b7816fac9a-kube-api-access-pg5vn\") pod \"obo-prometheus-operator-7c8cf85677-pkmns\" (UID: \"6710b308-b5a5-4043-97c2-68b7816fac9a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.386564 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.432155 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg5vn\" (UniqueName: \"kubernetes.io/projected/6710b308-b5a5-4043-97c2-68b7816fac9a-kube-api-access-pg5vn\") pod \"obo-prometheus-operator-7c8cf85677-pkmns\" (UID: \"6710b308-b5a5-4043-97c2-68b7816fac9a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.456205 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.473934 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-87pm5"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.475547 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.475918 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.477041 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.477123 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.477282 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.485015 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.485191 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-tw626"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.491857 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-87pm5"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580577 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgfgs\" (UniqueName: \"kubernetes.io/projected/32f5b188-86d1-4426-a043-04707dc47288-kube-api-access-fgfgs\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580634 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/32f5b188-86d1-4426-a043-04707dc47288-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580679 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580764 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580857 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.580889 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.584421 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.585830 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.586739 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72fdf198-38d1-43f1-896e-4ed41cc91785-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-rr264\" (UID: \"72fdf198-38d1-43f1-896e-4ed41cc91785\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.587226 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fc61e8ad-97b2-4569-afbc-b78a6536b09f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-76d859999c-v2j65\" (UID: \"fc61e8ad-97b2-4569-afbc-b78a6536b09f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.630714 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.660991 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.682825 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgfgs\" (UniqueName: \"kubernetes.io/projected/32f5b188-86d1-4426-a043-04707dc47288-kube-api-access-fgfgs\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.682899 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/32f5b188-86d1-4426-a043-04707dc47288-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.699153 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/32f5b188-86d1-4426-a043-04707dc47288-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.707920 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-vhmnz"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.713217 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgfgs\" (UniqueName: \"kubernetes.io/projected/32f5b188-86d1-4426-a043-04707dc47288-kube-api-access-fgfgs\") pod \"observability-operator-cc5f78dfc-87pm5\" (UID: \"32f5b188-86d1-4426-a043-04707dc47288\") " pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.714791 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.719728 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-x4jcd"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.723472 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-vhmnz"]
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.886531 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4wjd\" (UniqueName: \"kubernetes.io/projected/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-kube-api-access-l4wjd\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.886584 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.890658 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.991879 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4wjd\" (UniqueName: \"kubernetes.io/projected/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-kube-api-access-l4wjd\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.992485 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:25 crc kubenswrapper[4756]: I0930 21:13:25.993239 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.026657 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4wjd\" (UniqueName: \"kubernetes.io/projected/ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84-kube-api-access-l4wjd\") pod \"perses-operator-54bc95c9fb-vhmnz\" (UID: \"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84\") " pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.058855 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.150314 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns"]
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.308502 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65"]
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.418968 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264"]
Sep 30 21:13:26 crc kubenswrapper[4756]: W0930 21:13:26.421270 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72fdf198_38d1_43f1_896e_4ed41cc91785.slice/crio-d8a7c8ca52c53d1cf1101d62ac3b3c5f4447a02ea82c5c91e63151b73bf98574 WatchSource:0}: Error finding container d8a7c8ca52c53d1cf1101d62ac3b3c5f4447a02ea82c5c91e63151b73bf98574: Status 404 returned error can't find the container with id d8a7c8ca52c53d1cf1101d62ac3b3c5f4447a02ea82c5c91e63151b73bf98574
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.554911 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264" event={"ID":"72fdf198-38d1-43f1-896e-4ed41cc91785","Type":"ContainerStarted","Data":"d8a7c8ca52c53d1cf1101d62ac3b3c5f4447a02ea82c5c91e63151b73bf98574"}
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.556688 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65" event={"ID":"fc61e8ad-97b2-4569-afbc-b78a6536b09f","Type":"ContainerStarted","Data":"e71a27fb258b33d8d31230db670a3d31e9540d2587d42d2dbbde2c9850ca32e4"}
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.557954 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns" event={"ID":"6710b308-b5a5-4043-97c2-68b7816fac9a","Type":"ContainerStarted","Data":"2e217ddb418f3478ea458bc6230d170f74271073348775f35f0bce5db412a631"}
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.559686 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-87pm5"]
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.686165 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-vhmnz"]
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.735251 4756 scope.go:117] "RemoveContainer" containerID="17c2e6db03e4efc1e51d234dd3d5d389b910a3f8f9803a38f09c70f331aefc76"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.758373 4756 scope.go:117] "RemoveContainer" containerID="a22f1622dcfd04bf5db7bcd1135473d970103cabfcf897fae357586540d14479"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.819847 4756 scope.go:117] "RemoveContainer" containerID="34ca515ffc06242358cfc7613e003721216660a5d42f6f8a0e53301976e83a72"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.842809 4756 scope.go:117] "RemoveContainer" containerID="a84a3a34d1828508c86f88dbac5b79d7c18d102bc693414081a1f91e7a159dc9"
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.866331 4756 scope.go:117] "RemoveContainer" containerID="310f89380ae9046cb077ef97b6c26621edbdaed42ecb026128dc9b788c43e333"
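
The manager.go:1169 warnings in this log ("Failed to process watch event ... Status 404") appear to be a benign startup race: the cgroup for a brand-new container shows up before the runtime can answer for that container ID, and moments later the same ID turns up in a ContainerStarted event. A sketch of treating such a lookup as retryable rather than fatal (illustrative only; all names here are invented):

package main

import (
	"fmt"
	"strings"
)

func handleWatchEvent(lookup func(id string) error, id string) error {
	err := lookup(id)
	if err != nil && strings.Contains(err.Error(), "Status 404") {
		// The container was created an instant ago; let a later
		// housekeeping pass pick it up instead of failing hard.
		fmt.Printf("container %s not known yet, will retry\n", id)
		return nil
	}
	return err
}

func main() {
	notFound := func(id string) error {
		return fmt.Errorf("Status 404 returned error can't find the container with id %s", id)
	}
	fmt.Println(handleWatchEvent(notFound, "d8a7c8ca52c53d1cf1101d62ac3b3c5f4447a02ea82c5c91e63151b73bf98574"))
}
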
Sep 30 21:13:26 crc kubenswrapper[4756]: I0930 21:13:26.894224 4756 scope.go:117] "RemoveContainer" containerID="01072dd17822e98196499fcbfee9bc9575306997c9eaffc0bf463433e52a7147"
Sep 30 21:13:27 crc kubenswrapper[4756]: I0930 21:13:27.040626 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xsk5l"]
Sep 30 21:13:27 crc kubenswrapper[4756]: I0930 21:13:27.052539 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xsk5l"]
Sep 30 21:13:27 crc kubenswrapper[4756]: I0930 21:13:27.147870 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="134e5ca4-9f84-4f3f-8f8e-6b6dc7953308" path="/var/lib/kubelet/pods/134e5ca4-9f84-4f3f-8f8e-6b6dc7953308/volumes"
Sep 30 21:13:27 crc kubenswrapper[4756]: I0930 21:13:27.573523 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5" event={"ID":"32f5b188-86d1-4426-a043-04707dc47288","Type":"ContainerStarted","Data":"0ac0fec3150aff1f806e3a8f818e44f31f4d8d3565454bc05d1983785f5d5cdc"}
Sep 30 21:13:27 crc kubenswrapper[4756]: I0930 21:13:27.591538 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz" event={"ID":"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84","Type":"ContainerStarted","Data":"f808a2e21b5afef90b950fa4ea528f17713cc91d0bfd1b1054bb2dcf3a71802c"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.673264 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65" event={"ID":"fc61e8ad-97b2-4569-afbc-b78a6536b09f","Type":"ContainerStarted","Data":"38bc7a0818ea619e4d4ebfb51e2517543c0a696ec0aa5ec2fe21c1c5d5324dbd"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.675891 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns" event={"ID":"6710b308-b5a5-4043-97c2-68b7816fac9a","Type":"ContainerStarted","Data":"48dd2a1f2b5f775922a8cc229c3e6a7749e9558990512b39f140bb0f58db17fb"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.680171 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5" event={"ID":"32f5b188-86d1-4426-a043-04707dc47288","Type":"ContainerStarted","Data":"e7b62aef59db43a69767bd63bd472b160fc60c10595e1c0497e8de5d27f9d9a6"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.682388 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.685882 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264" event={"ID":"72fdf198-38d1-43f1-896e-4ed41cc91785","Type":"ContainerStarted","Data":"a27189d6df9c74f33b4abc72ed3b718788ab7b7991101ef8685d7b141d88b711"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.688285 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz" event={"ID":"ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84","Type":"ContainerStarted","Data":"0eeab7bf30d771c4b6e7000a67a798a064bfd6c95954a21ed7113960b012e8a1"}
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.688426 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.695084 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-v2j65" podStartSLOduration=2.512989998 podStartE2EDuration="10.695069589s" podCreationTimestamp="2025-09-30 21:13:25 +0000 UTC" firstStartedPulling="2025-09-30 21:13:26.316078253 +0000 UTC m=+6135.937011730" lastFinishedPulling="2025-09-30 21:13:34.498157844 +0000 UTC m=+6144.119091321" observedRunningTime="2025-09-30 21:13:35.692525873 +0000 UTC m=+6145.313459360" watchObservedRunningTime="2025-09-30 21:13:35.695069589 +0000 UTC m=+6145.316003066"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.732512 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-pkmns" podStartSLOduration=2.3035389090000002 podStartE2EDuration="10.732490085s" podCreationTimestamp="2025-09-30 21:13:25 +0000 UTC" firstStartedPulling="2025-09-30 21:13:26.161096133 +0000 UTC m=+6135.782029610" lastFinishedPulling="2025-09-30 21:13:34.590047319 +0000 UTC m=+6144.210980786" observedRunningTime="2025-09-30 21:13:35.722248008 +0000 UTC m=+6145.343181485" watchObservedRunningTime="2025-09-30 21:13:35.732490085 +0000 UTC m=+6145.353423562"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.752768 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.758272 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-87pm5" podStartSLOduration=2.6712163049999997 podStartE2EDuration="10.758254617s" podCreationTimestamp="2025-09-30 21:13:25 +0000 UTC" firstStartedPulling="2025-09-30 21:13:26.59861864 +0000 UTC m=+6136.219552117" lastFinishedPulling="2025-09-30 21:13:34.685656952 +0000 UTC m=+6144.306590429" observedRunningTime="2025-09-30 21:13:35.75684949 +0000 UTC m=+6145.377782967" watchObservedRunningTime="2025-09-30 21:13:35.758254617 +0000 UTC m=+6145.379188094"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.802408 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz" podStartSLOduration=2.903957513 podStartE2EDuration="10.802380057s" podCreationTimestamp="2025-09-30 21:13:25 +0000 UTC" firstStartedPulling="2025-09-30 21:13:26.691669876 +0000 UTC m=+6136.312603353" lastFinishedPulling="2025-09-30 21:13:34.59009242 +0000 UTC m=+6144.211025897" observedRunningTime="2025-09-30 21:13:35.801116144 +0000 UTC m=+6145.422049621" watchObservedRunningTime="2025-09-30 21:13:35.802380057 +0000 UTC m=+6145.423313534"
Sep 30 21:13:35 crc kubenswrapper[4756]: I0930 21:13:35.870070 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-76d859999c-rr264" podStartSLOduration=2.799483048 podStartE2EDuration="10.870052191s" podCreationTimestamp="2025-09-30 21:13:25 +0000 UTC" firstStartedPulling="2025-09-30 21:13:26.43294849 +0000 UTC m=+6136.053881957" lastFinishedPulling="2025-09-30 21:13:34.503517603 +0000 UTC m=+6144.124451100" observedRunningTime="2025-09-30 21:13:35.82665208 +0000 UTC m=+6145.447585557" watchObservedRunningTime="2025-09-30 21:13:35.870052191 +0000 UTC m=+6145.490985668"
Sep 30 21:13:41 crc kubenswrapper[4756]: I0930 21:13:41.539223 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 21:13:41 crc kubenswrapper[4756]: I0930 21:13:41.539841 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 21:13:42 crc kubenswrapper[4756]: I0930 21:13:42.056645 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svqlp"]
Sep 30 21:13:42 crc kubenswrapper[4756]: I0930 21:13:42.072897 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svqlp"]
Sep 30 21:13:43 crc kubenswrapper[4756]: I0930 21:13:43.038586 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-g7jd6"]
Sep 30 21:13:43 crc kubenswrapper[4756]: I0930 21:13:43.048296 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-g7jd6"]
Sep 30 21:13:43 crc kubenswrapper[4756]: I0930 21:13:43.129777 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2" path="/var/lib/kubelet/pods/1c7caeef-7ea4-4e7f-9e4b-de3aed88f7e2/volumes"
Sep 30 21:13:43 crc kubenswrapper[4756]: I0930 21:13:43.130953 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb17af9-3b2c-4719-b35b-349cd56624f2" path="/var/lib/kubelet/pods/4bb17af9-3b2c-4719-b35b-349cd56624f2/volumes"
Sep 30 21:13:46 crc kubenswrapper[4756]: I0930 21:13:46.062108 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-vhmnz"
Sep 30 21:13:48 crc kubenswrapper[4756]: I0930 21:13:48.970576 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:13:48 crc kubenswrapper[4756]: I0930 21:13:48.971265 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" containerName="openstackclient" containerID="cri-o://87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f" gracePeriod=2
Sep 30 21:13:48 crc kubenswrapper[4756]: I0930 21:13:48.980264 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.036201 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Sep 30 21:13:49 crc kubenswrapper[4756]: E0930 21:13:49.036714 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" containerName="openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.036731 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" containerName="openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.036970 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" containerName="openstackclient"
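
"Killing container with a grace period ... gracePeriod=2" above is the standard termination sequence: SIGTERM first, then SIGKILL once the grace period expires. That is also why horizon's container earlier in this log finished with exitCode=137: wait statuses above 128 encode death by signal, and 137 = 128 + 9 (SIGKILL). A tiny decoder for such codes:

package main

import "fmt"

func describeExit(code int) string {
	if code > 128 {
		return fmt.Sprintf("exit %d: killed by signal %d", code, code-128)
	}
	return fmt.Sprintf("exit %d: normal exit status", code)
}

func main() {
	fmt.Println(describeExit(137)) // exit 137: killed by signal 9 (SIGKILL)
	fmt.Println(describeExit(0))   // exit 0: normal exit status
}
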
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.052273 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.064684 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" podUID="bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.112011 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.112254 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config-secret\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.112423 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbbln\" (UniqueName: \"kubernetes.io/projected/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-kube-api-access-bbbln\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.214332 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbbln\" (UniqueName: \"kubernetes.io/projected/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-kube-api-access-bbbln\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.214461 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.214567 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config-secret\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.217211 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.227446 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-openstack-config-secret\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.268242 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.272044 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.276219 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-pgktl"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.292432 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbbln\" (UniqueName: \"kubernetes.io/projected/bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1-kube-api-access-bbbln\") pod \"openstackclient\" (UID: \"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1\") " pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.298694 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.316642 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2drt2\" (UniqueName: \"kubernetes.io/projected/6b29dd70-bdc6-4616-8503-f787d4602e39-kube-api-access-2drt2\") pod \"kube-state-metrics-0\" (UID: \"6b29dd70-bdc6-4616-8503-f787d4602e39\") " pod="openstack/kube-state-metrics-0"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.399837 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.418857 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2drt2\" (UniqueName: \"kubernetes.io/projected/6b29dd70-bdc6-4616-8503-f787d4602e39-kube-api-access-2drt2\") pod \"kube-state-metrics-0\" (UID: \"6b29dd70-bdc6-4616-8503-f787d4602e39\") " pod="openstack/kube-state-metrics-0"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.485288 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2drt2\" (UniqueName: \"kubernetes.io/projected/6b29dd70-bdc6-4616-8503-f787d4602e39-kube-api-access-2drt2\") pod \"kube-state-metrics-0\" (UID: \"6b29dd70-bdc6-4616-8503-f787d4602e39\") " pod="openstack/kube-state-metrics-0"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.677096 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 30 21:13:49 crc kubenswrapper[4756]: I0930 21:13:49.990334 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.010409 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
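Annotation: each volume above goes through the same logged lifecycle (VerifyControllerAttachedVolume started, MountVolume started, MountVolume.SetUp succeeded; UnmountVolume started on teardown). A small stdlib-Go sketch that tallies those phases per volume name when fed a kubelet log on stdin (regex and structure are ours, matched against the escaped-quote form these lines use):

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Counts reconciler phases per volume: attach-verify, mount start, setup
// success, and unmount start, as logged by reconciler_common.go and
// operation_generator.go.
func main() {
	re := regexp.MustCompile(`\b(VerifyControllerAttachedVolume started|UnmountVolume started|MountVolume started|MountVolume\.SetUp succeeded) for volume \\"([^"\\]+)\\"`)
	phases := map[string]map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			if phases[m[2]] == nil {
				phases[m[2]] = map[string]int{}
			}
			phases[m[2]][m[1]]++
		}
	}
	for vol, p := range phases {
		fmt.Printf("%-40s %v\n", vol, p)
	}
}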
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.017860 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.017990 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-mqb5w"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.018045 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.018157 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.069871 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169121 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169166 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169192 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvr9x\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-kube-api-access-dvr9x\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169243 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169304 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.169326 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.273381 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.274809 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvr9x\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-kube-api-access-dvr9x\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.275063 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.275271 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.275305 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.275469 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.278963 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.284794 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.295011 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.295677 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/20597afa-0a2a-4763-a158-89cb5f521be9-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.303727 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/20597afa-0a2a-4763-a158-89cb5f521be9-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.306244 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvr9x\" (UniqueName: \"kubernetes.io/projected/20597afa-0a2a-4763-a158-89cb5f521be9-kube-api-access-dvr9x\") pod \"alertmanager-metric-storage-0\" (UID: \"20597afa-0a2a-4763-a158-89cb5f521be9\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.372939 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.619909 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.648535 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.650873 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.657225 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.657389 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.657601 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-jcljb"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.657730 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.657830 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.662698 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.670236 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.795975 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c79a9792-587e-498a-9506-be25271a0884-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796551 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c79a9792-587e-498a-9506-be25271a0884-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c79a9792-587e-498a-9506-be25271a0884-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796637 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-config\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796710 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7grsm\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-kube-api-access-7grsm\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796820 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796849 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796909 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.796942 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.851825 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1","Type":"ContainerStarted","Data":"1b09c233416a59e7db77c5b9bed96e0ff3023229fdbd7572eb70d26cbceeca89"} Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.884845 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900598 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-config\") pod \"prometheus-metric-storage-0\" (UID: 
\"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900663 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7grsm\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-kube-api-access-7grsm\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900718 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900734 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900763 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900780 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900874 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c79a9792-587e-498a-9506-be25271a0884-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.900892 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c79a9792-587e-498a-9506-be25271a0884-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.904891 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c79a9792-587e-498a-9506-be25271a0884-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.905412 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: 
\"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.908528 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c79a9792-587e-498a-9506-be25271a0884-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.909540 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.912745 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.913979 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79a9792-587e-498a-9506-be25271a0884-config\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.937124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7grsm\" (UniqueName: \"kubernetes.io/projected/c79a9792-587e-498a-9506-be25271a0884-kube-api-access-7grsm\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.948150 4756 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 21:13:50 crc kubenswrapper[4756]: I0930 21:13:50.948199 4756 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c26cea2ae195f465432070961257a3ca9e33550de12db3218deee2ba5dd42a58/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.053473 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd254bc6-446c-4dc1-b682-8a461a76cd75\") pod \"prometheus-metric-storage-0\" (UID: \"c79a9792-587e-498a-9506-be25271a0884\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.285455 4756 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.488780 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.542301 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.546831 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" podUID="bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.633583 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret\") pod \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") "
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.633678 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config\") pod \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") "
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.633724 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96ndk\" (UniqueName: \"kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk\") pod \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\" (UID: \"530205ed-633e-43aa-bc1b-dd05d2d62bd3\") "
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.642626 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk" (OuterVolumeSpecName: "kube-api-access-96ndk") pod "530205ed-633e-43aa-bc1b-dd05d2d62bd3" (UID: "530205ed-633e-43aa-bc1b-dd05d2d62bd3"). InnerVolumeSpecName "kube-api-access-96ndk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.720672 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "530205ed-633e-43aa-bc1b-dd05d2d62bd3" (UID: "530205ed-633e-43aa-bc1b-dd05d2d62bd3"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.730936 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "530205ed-633e-43aa-bc1b-dd05d2d62bd3" (UID: "530205ed-633e-43aa-bc1b-dd05d2d62bd3"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.735983 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.736020 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96ndk\" (UniqueName: \"kubernetes.io/projected/530205ed-633e-43aa-bc1b-dd05d2d62bd3-kube-api-access-96ndk\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.736033 4756 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/530205ed-633e-43aa-bc1b-dd05d2d62bd3-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.867166 4756 generic.go:334] "Generic (PLEG): container finished" podID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" containerID="87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f" exitCode=137
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.867303 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.867351 4756 scope.go:117] "RemoveContainer" containerID="87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.869720 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" podUID="bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.881809 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1","Type":"ContainerStarted","Data":"d49e5a7278e6e165fcba4d2fad681bce28a5708925bdd896f21a54cc118574cc"}
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.887712 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"20597afa-0a2a-4763-a158-89cb5f521be9","Type":"ContainerStarted","Data":"45bbf8f8820ec57b7b15debe880217bbbc49d6167100baa1be8c5ced78fc89d1"}
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.901591 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6b29dd70-bdc6-4616-8503-f787d4602e39","Type":"ContainerStarted","Data":"ac242092816df235760c39dc757252e004f1f9072560639594df6cedf790be02"}
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.909383 4756 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" podUID="bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.915226 4756 scope.go:117] "RemoveContainer" containerID="87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.917086 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 30 21:13:51 crc kubenswrapper[4756]: E0930 21:13:51.919562 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f\": container with ID starting with 87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f not found: ID does not exist" containerID="87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.919594 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f"} err="failed to get container status \"87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f\": rpc error: code = NotFound desc = could not find container \"87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f\": container with ID starting with 87a18660104dc824d44430d13a0440c6d942e490cb930b20077b42c735ff8d1f not found: ID does not exist"
Sep 30 21:13:51 crc kubenswrapper[4756]: I0930 21:13:51.919942 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.919898018 podStartE2EDuration="2.919898018s" podCreationTimestamp="2025-09-30 21:13:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:13:51.904380203 +0000 UTC m=+6161.525313680" watchObservedRunningTime="2025-09-30 21:13:51.919898018 +0000 UTC m=+6161.540831505"
Sep 30 21:13:52 crc kubenswrapper[4756]: I0930 21:13:52.914956 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6b29dd70-bdc6-4616-8503-f787d4602e39","Type":"ContainerStarted","Data":"1f29f02547bca5e284e91619e678211f72b7ca2ad6508663cfc09936c7156bf8"}
Sep 30 21:13:52 crc kubenswrapper[4756]: I0930 21:13:52.916250 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Sep 30 21:13:52 crc kubenswrapper[4756]: I0930 21:13:52.917507 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"840f056ef43f5aa70e777e4cceb3850e9c014fe92ee38478ad8535b620791fe9"}
Sep 30 21:13:52 crc kubenswrapper[4756]: I0930 21:13:52.935437 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.109346786 podStartE2EDuration="3.935422034s" podCreationTimestamp="2025-09-30 21:13:49 +0000 UTC" firstStartedPulling="2025-09-30 21:13:50.899919795 +0000 UTC m=+6160.520853272" lastFinishedPulling="2025-09-30 21:13:51.725995043 +0000 UTC m=+6161.346928520" observedRunningTime="2025-09-30 21:13:52.933339159 +0000 UTC m=+6162.554272656" watchObservedRunningTime="2025-09-30 21:13:52.935422034 +0000 UTC m=+6162.556355511"
Sep 30 21:13:53 crc kubenswrapper[4756]: I0930 21:13:53.143648 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="530205ed-633e-43aa-bc1b-dd05d2d62bd3" path="/var/lib/kubelet/pods/530205ed-633e-43aa-bc1b-dd05d2d62bd3/volumes"
Sep 30 21:13:59 crc kubenswrapper[4756]: I0930 21:13:59.002562 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"20597afa-0a2a-4763-a158-89cb5f521be9","Type":"ContainerStarted","Data":"350333b2625582ee9df75ee190f0451124c48da5c6a3b0195515429c1a0632b0"}
Sep 30 21:13:59 crc kubenswrapper[4756]: I0930 21:13:59.006075 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"5f97a45b2259823b8685dede8764651ef3383165f9b25a6fd62dc77d694d107c"}
pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"5f97a45b2259823b8685dede8764651ef3383165f9b25a6fd62dc77d694d107c"} Sep 30 21:13:59 crc kubenswrapper[4756]: I0930 21:13:59.684026 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 21:14:01 crc kubenswrapper[4756]: I0930 21:14:01.038890 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvd7j"] Sep 30 21:14:01 crc kubenswrapper[4756]: I0930 21:14:01.047507 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvd7j"] Sep 30 21:14:01 crc kubenswrapper[4756]: I0930 21:14:01.131860 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fab8c82-6b0a-4747-b130-d61ef9d1b329" path="/var/lib/kubelet/pods/2fab8c82-6b0a-4747-b130-d61ef9d1b329/volumes" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.013992 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.018246 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.045074 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.082261 4756 generic.go:334] "Generic (PLEG): container finished" podID="c79a9792-587e-498a-9506-be25271a0884" containerID="5f97a45b2259823b8685dede8764651ef3383165f9b25a6fd62dc77d694d107c" exitCode=0 Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.082309 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerDied","Data":"5f97a45b2259823b8685dede8764651ef3383165f9b25a6fd62dc77d694d107c"} Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.112868 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.113017 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgjfb\" (UniqueName: \"kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.113045 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.215603 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content\") pod 
\"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.215742 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgjfb\" (UniqueName: \"kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.215795 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.216344 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.217079 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.244608 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgjfb\" (UniqueName: \"kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb\") pod \"redhat-operators-rw7vk\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.347823 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:05 crc kubenswrapper[4756]: I0930 21:14:05.847985 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:06 crc kubenswrapper[4756]: I0930 21:14:06.092347 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerStarted","Data":"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1"} Sep 30 21:14:06 crc kubenswrapper[4756]: I0930 21:14:06.092413 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerStarted","Data":"c84b50fdc88cc7c4ca4109bd26177b9b9fe99eee609923b796379eee6784a637"} Sep 30 21:14:06 crc kubenswrapper[4756]: I0930 21:14:06.093591 4756 generic.go:334] "Generic (PLEG): container finished" podID="20597afa-0a2a-4763-a158-89cb5f521be9" containerID="350333b2625582ee9df75ee190f0451124c48da5c6a3b0195515429c1a0632b0" exitCode=0 Sep 30 21:14:06 crc kubenswrapper[4756]: I0930 21:14:06.093632 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"20597afa-0a2a-4763-a158-89cb5f521be9","Type":"ContainerDied","Data":"350333b2625582ee9df75ee190f0451124c48da5c6a3b0195515429c1a0632b0"} Sep 30 21:14:07 crc kubenswrapper[4756]: I0930 21:14:07.103795 4756 generic.go:334] "Generic (PLEG): container finished" podID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerID="b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1" exitCode=0 Sep 30 21:14:07 crc kubenswrapper[4756]: I0930 21:14:07.103862 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerDied","Data":"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1"} Sep 30 21:14:11 crc kubenswrapper[4756]: I0930 21:14:11.140859 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"20597afa-0a2a-4763-a158-89cb5f521be9","Type":"ContainerStarted","Data":"64ad1a06b81d3029e29f1410c09dc71ab8219b08bafb275d3eacb0a56358d123"} Sep 30 21:14:11 crc kubenswrapper[4756]: I0930 21:14:11.142832 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"88151abd338ee4bc392e483426073a0ea1655deb4b25e66f7eb0efd4b7f98e07"} Sep 30 21:14:11 crc kubenswrapper[4756]: I0930 21:14:11.144598 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerStarted","Data":"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd"} Sep 30 21:14:11 crc kubenswrapper[4756]: I0930 21:14:11.539643 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:14:11 crc kubenswrapper[4756]: I0930 21:14:11.539718 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" 
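Annotation: the recurring machine-config-daemon liveness failure (connection refused on http://127.0.0.1:8798/health) repeats 30 seconds apart (21:13:41 and 21:14:11), consistent with a 30s probe period. A hypothetical reconstruction of such a probe using k8s.io/api types (field names per Kubernetes >= 1.23; the actual machine-config-daemon manifest may differ):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// A liveness probe shaped like the one failing above: HTTP GET against the
// loopback health endpoint of a host-networked daemon. PeriodSeconds=30 is
// inferred from the failure spacing; FailureThreshold=3 is the k8s default.
func main() {
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    30,
		FailureThreshold: 3,
	}
	fmt.Printf("%+v\n", probe)
}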
podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:14:15 crc kubenswrapper[4756]: I0930 21:14:15.189297 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"20597afa-0a2a-4763-a158-89cb5f521be9","Type":"ContainerStarted","Data":"d385b447933891d7ed000132933e3926011de332614af264fb1bffd4370cda65"} Sep 30 21:14:15 crc kubenswrapper[4756]: I0930 21:14:15.189720 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Sep 30 21:14:15 crc kubenswrapper[4756]: I0930 21:14:15.192331 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Sep 30 21:14:15 crc kubenswrapper[4756]: I0930 21:14:15.196004 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"b4ccb44e10d9286495800ad3ee7d449bed672116b42d7bff42f2915d6fe88304"} Sep 30 21:14:15 crc kubenswrapper[4756]: I0930 21:14:15.216330 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=7.175363206 podStartE2EDuration="26.216311726s" podCreationTimestamp="2025-09-30 21:13:49 +0000 UTC" firstStartedPulling="2025-09-30 21:13:51.610572833 +0000 UTC m=+6161.231506310" lastFinishedPulling="2025-09-30 21:14:10.651521353 +0000 UTC m=+6180.272454830" observedRunningTime="2025-09-30 21:14:15.206693435 +0000 UTC m=+6184.827626922" watchObservedRunningTime="2025-09-30 21:14:15.216311726 +0000 UTC m=+6184.837245203" Sep 30 21:14:16 crc kubenswrapper[4756]: I0930 21:14:16.210074 4756 generic.go:334] "Generic (PLEG): container finished" podID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerID="75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd" exitCode=0 Sep 30 21:14:16 crc kubenswrapper[4756]: I0930 21:14:16.210174 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerDied","Data":"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd"} Sep 30 21:14:17 crc kubenswrapper[4756]: I0930 21:14:17.234709 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerStarted","Data":"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d"} Sep 30 21:14:17 crc kubenswrapper[4756]: I0930 21:14:17.260288 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rw7vk" podStartSLOduration=3.735741284 podStartE2EDuration="13.260269095s" podCreationTimestamp="2025-09-30 21:14:04 +0000 UTC" firstStartedPulling="2025-09-30 21:14:07.105731788 +0000 UTC m=+6176.726665265" lastFinishedPulling="2025-09-30 21:14:16.630259599 +0000 UTC m=+6186.251193076" observedRunningTime="2025-09-30 21:14:17.254199967 +0000 UTC m=+6186.875133454" watchObservedRunningTime="2025-09-30 21:14:17.260269095 +0000 UTC m=+6186.881202572" Sep 30 21:14:20 crc kubenswrapper[4756]: I0930 21:14:20.268913 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"c79a9792-587e-498a-9506-be25271a0884","Type":"ContainerStarted","Data":"2b499da8a4cf34f75c8a8490e1ce7d1fc3659f605ca6e8791e528459075ae43b"} Sep 30 21:14:20 crc kubenswrapper[4756]: I0930 21:14:20.302110 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.070591039 podStartE2EDuration="31.30208906s" podCreationTimestamp="2025-09-30 21:13:49 +0000 UTC" firstStartedPulling="2025-09-30 21:13:51.934040297 +0000 UTC m=+6161.554973774" lastFinishedPulling="2025-09-30 21:14:19.165538318 +0000 UTC m=+6188.786471795" observedRunningTime="2025-09-30 21:14:20.299140683 +0000 UTC m=+6189.920074160" watchObservedRunningTime="2025-09-30 21:14:20.30208906 +0000 UTC m=+6189.923022537" Sep 30 21:14:21 crc kubenswrapper[4756]: I0930 21:14:21.285906 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 21:14:21 crc kubenswrapper[4756]: I0930 21:14:21.285960 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 21:14:21 crc kubenswrapper[4756]: I0930 21:14:21.288585 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 21:14:22 crc kubenswrapper[4756]: I0930 21:14:22.285968 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.672153 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.675582 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.677521 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.678113 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.693041 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.815559 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.815771 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.815925 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.815972 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.816006 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.816163 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br78h\" (UniqueName: \"kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.816316 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.917982 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918047 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918087 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918161 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br78h\" (UniqueName: \"kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918227 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918275 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.918338 4756 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.919124 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.919202 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.925010 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.925521 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.926808 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.929969 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.938246 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br78h\" (UniqueName: \"kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h\") pod \"ceilometer-0\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " pod="openstack/ceilometer-0" Sep 30 21:14:23 crc kubenswrapper[4756]: I0930 21:14:23.992077 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:14:24 crc kubenswrapper[4756]: I0930 21:14:24.513534 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:14:24 crc kubenswrapper[4756]: W0930 21:14:24.518173 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b08747b_e36e_4fa3_8306_49231ddaff24.slice/crio-fce74d856de88546a0718fc87e0768bf2268d2224d22eb0b4f713597e739ce92 WatchSource:0}: Error finding container fce74d856de88546a0718fc87e0768bf2268d2224d22eb0b4f713597e739ce92: Status 404 returned error can't find the container with id fce74d856de88546a0718fc87e0768bf2268d2224d22eb0b4f713597e739ce92 Sep 30 21:14:25 crc kubenswrapper[4756]: I0930 21:14:25.325551 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerStarted","Data":"31e3716a8f786c73d473d22f67927831afa23e566631c06b431829edfc9978b9"} Sep 30 21:14:25 crc kubenswrapper[4756]: I0930 21:14:25.325820 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerStarted","Data":"fce74d856de88546a0718fc87e0768bf2268d2224d22eb0b4f713597e739ce92"} Sep 30 21:14:25 crc kubenswrapper[4756]: I0930 21:14:25.348577 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:25 crc kubenswrapper[4756]: I0930 21:14:25.348630 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:26 crc kubenswrapper[4756]: I0930 21:14:26.345585 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerStarted","Data":"1bf3677fc5d17f43143a12610cbe34d009d9324d26daeb768eebe10ce788b66f"} Sep 30 21:14:26 crc kubenswrapper[4756]: I0930 21:14:26.398804 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rw7vk" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" probeResult="failure" output=< Sep 30 21:14:26 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s Sep 30 21:14:26 crc kubenswrapper[4756]: > Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.065763 4756 scope.go:117] "RemoveContainer" containerID="39690dbc215f423057d4ee5db6901f534d08ef585a8c5ad48b9dbadb32db422b" Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.150059 4756 scope.go:117] "RemoveContainer" containerID="91b0e4a91df6435eebf9199d7a8bfff9d646e32b50218937b93ef02c4b8f0288" Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.229039 4756 scope.go:117] "RemoveContainer" containerID="8407d7908fdfd3a1ea404f121edecbdb2836503b62c9f27ad191957603fd4fca" Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.270416 4756 scope.go:117] "RemoveContainer" containerID="84a4ab483a45f9f5f306b97ffcfd9900f6b1710503c9cff033755cd83774eee1" Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.315258 4756 scope.go:117] "RemoveContainer" containerID="c54d32dfa8249d867f175844112222b3906bea1c51de1cc2408e0b68fcdedbe7" Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.361119 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerStarted","Data":"03cf0cc099eb3011a3fc1e92d459a39789636bb1da651f5a098fbd964852bab1"} Sep 30 21:14:27 crc kubenswrapper[4756]: I0930 21:14:27.372064 4756 scope.go:117] "RemoveContainer" containerID="6aaed6b894650103f5ab222d89bb8c8ef15da0085a383ae9eb3fa8d7f0e78b53" Sep 30 21:14:29 crc kubenswrapper[4756]: I0930 21:14:29.388744 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerStarted","Data":"19daccefd2d2dded94832ed1f222e136406502fbbc1b8f5af4942eefd6d6e05c"} Sep 30 21:14:29 crc kubenswrapper[4756]: I0930 21:14:29.389340 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 21:14:29 crc kubenswrapper[4756]: I0930 21:14:29.415961 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.659810464 podStartE2EDuration="6.415940503s" podCreationTimestamp="2025-09-30 21:14:23 +0000 UTC" firstStartedPulling="2025-09-30 21:14:24.519914665 +0000 UTC m=+6194.140848142" lastFinishedPulling="2025-09-30 21:14:28.276044714 +0000 UTC m=+6197.896978181" observedRunningTime="2025-09-30 21:14:29.410136122 +0000 UTC m=+6199.031069599" watchObservedRunningTime="2025-09-30 21:14:29.415940503 +0000 UTC m=+6199.036873980" Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.320718 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-kkqjz"] Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.322422 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.331451 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-kkqjz"] Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.427846 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs4kx\" (UniqueName: \"kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx\") pod \"aodh-db-create-kkqjz\" (UID: \"ced7e732-18e2-428b-9fdb-15d1d252ad85\") " pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.530064 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs4kx\" (UniqueName: \"kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx\") pod \"aodh-db-create-kkqjz\" (UID: \"ced7e732-18e2-428b-9fdb-15d1d252ad85\") " pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.562411 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs4kx\" (UniqueName: \"kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx\") pod \"aodh-db-create-kkqjz\" (UID: \"ced7e732-18e2-428b-9fdb-15d1d252ad85\") " pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:33 crc kubenswrapper[4756]: I0930 21:14:33.640572 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:34 crc kubenswrapper[4756]: I0930 21:14:34.127464 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-kkqjz"] Sep 30 21:14:34 crc kubenswrapper[4756]: I0930 21:14:34.484830 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-kkqjz" event={"ID":"ced7e732-18e2-428b-9fdb-15d1d252ad85","Type":"ContainerStarted","Data":"e77b1bd226dc21052b0bae20dc90c3dc6cd34761ae2683dbf3971e5137d5c5be"} Sep 30 21:14:35 crc kubenswrapper[4756]: I0930 21:14:35.498035 4756 generic.go:334] "Generic (PLEG): container finished" podID="ced7e732-18e2-428b-9fdb-15d1d252ad85" containerID="3ef02ab4a3614d88783435bf5129a7e80ad1288520852c76ded7cd3c2c82e23c" exitCode=0 Sep 30 21:14:35 crc kubenswrapper[4756]: I0930 21:14:35.498118 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-kkqjz" event={"ID":"ced7e732-18e2-428b-9fdb-15d1d252ad85","Type":"ContainerDied","Data":"3ef02ab4a3614d88783435bf5129a7e80ad1288520852c76ded7cd3c2c82e23c"} Sep 30 21:14:36 crc kubenswrapper[4756]: I0930 21:14:36.409270 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rw7vk" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" probeResult="failure" output=< Sep 30 21:14:36 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s Sep 30 21:14:36 crc kubenswrapper[4756]: > Sep 30 21:14:36 crc kubenswrapper[4756]: I0930 21:14:36.948225 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.004057 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs4kx\" (UniqueName: \"kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx\") pod \"ced7e732-18e2-428b-9fdb-15d1d252ad85\" (UID: \"ced7e732-18e2-428b-9fdb-15d1d252ad85\") " Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.010421 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx" (OuterVolumeSpecName: "kube-api-access-hs4kx") pod "ced7e732-18e2-428b-9fdb-15d1d252ad85" (UID: "ced7e732-18e2-428b-9fdb-15d1d252ad85"). InnerVolumeSpecName "kube-api-access-hs4kx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.106240 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs4kx\" (UniqueName: \"kubernetes.io/projected/ced7e732-18e2-428b-9fdb-15d1d252ad85-kube-api-access-hs4kx\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.517192 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-kkqjz" event={"ID":"ced7e732-18e2-428b-9fdb-15d1d252ad85","Type":"ContainerDied","Data":"e77b1bd226dc21052b0bae20dc90c3dc6cd34761ae2683dbf3971e5137d5c5be"} Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.517575 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e77b1bd226dc21052b0bae20dc90c3dc6cd34761ae2683dbf3971e5137d5c5be" Sep 30 21:14:37 crc kubenswrapper[4756]: I0930 21:14:37.517255 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-kkqjz" Sep 30 21:14:41 crc kubenswrapper[4756]: I0930 21:14:41.539191 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:14:41 crc kubenswrapper[4756]: I0930 21:14:41.539895 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:14:41 crc kubenswrapper[4756]: I0930 21:14:41.539992 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 21:14:41 crc kubenswrapper[4756]: I0930 21:14:41.541052 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:14:41 crc kubenswrapper[4756]: I0930 21:14:41.541137 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc" gracePeriod=600 Sep 30 21:14:42 crc kubenswrapper[4756]: I0930 21:14:42.571620 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc" exitCode=0 Sep 30 21:14:42 crc kubenswrapper[4756]: I0930 21:14:42.571706 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc"} Sep 30 21:14:42 crc kubenswrapper[4756]: I0930 21:14:42.572176 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"} Sep 30 21:14:42 crc kubenswrapper[4756]: I0930 21:14:42.572208 4756 scope.go:117] "RemoveContainer" containerID="6282743b2855b6e8d5a5f2de10949d881993e076a7475251b665c1caeac15b6c" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.453983 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-f034-account-create-n2g2c"] Sep 30 21:14:43 crc kubenswrapper[4756]: E0930 21:14:43.454876 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced7e732-18e2-428b-9fdb-15d1d252ad85" containerName="mariadb-database-create" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.454954 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced7e732-18e2-428b-9fdb-15d1d252ad85" containerName="mariadb-database-create" Sep 30 21:14:43 crc 
kubenswrapper[4756]: I0930 21:14:43.455237 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced7e732-18e2-428b-9fdb-15d1d252ad85" containerName="mariadb-database-create" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.456113 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.458333 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.463980 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-f034-account-create-n2g2c"] Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.545096 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-667ph\" (UniqueName: \"kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph\") pod \"aodh-f034-account-create-n2g2c\" (UID: \"1dac2869-2e8d-403d-bfe4-65d5d74ee236\") " pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.647045 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-667ph\" (UniqueName: \"kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph\") pod \"aodh-f034-account-create-n2g2c\" (UID: \"1dac2869-2e8d-403d-bfe4-65d5d74ee236\") " pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.669014 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-667ph\" (UniqueName: \"kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph\") pod \"aodh-f034-account-create-n2g2c\" (UID: \"1dac2869-2e8d-403d-bfe4-65d5d74ee236\") " pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:43 crc kubenswrapper[4756]: I0930 21:14:43.777053 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:44 crc kubenswrapper[4756]: I0930 21:14:44.309763 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-f034-account-create-n2g2c"] Sep 30 21:14:44 crc kubenswrapper[4756]: W0930 21:14:44.320791 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dac2869_2e8d_403d_bfe4_65d5d74ee236.slice/crio-78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715 WatchSource:0}: Error finding container 78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715: Status 404 returned error can't find the container with id 78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715 Sep 30 21:14:44 crc kubenswrapper[4756]: I0930 21:14:44.595691 4756 generic.go:334] "Generic (PLEG): container finished" podID="1dac2869-2e8d-403d-bfe4-65d5d74ee236" containerID="8a089b5063061775d2a83998f225b590b328d1b9e6eab35c6d079004e6512cb2" exitCode=0 Sep 30 21:14:44 crc kubenswrapper[4756]: I0930 21:14:44.595792 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f034-account-create-n2g2c" event={"ID":"1dac2869-2e8d-403d-bfe4-65d5d74ee236","Type":"ContainerDied","Data":"8a089b5063061775d2a83998f225b590b328d1b9e6eab35c6d079004e6512cb2"} Sep 30 21:14:44 crc kubenswrapper[4756]: I0930 21:14:44.596005 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f034-account-create-n2g2c" event={"ID":"1dac2869-2e8d-403d-bfe4-65d5d74ee236","Type":"ContainerStarted","Data":"78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715"} Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.029745 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-8gl65"] Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.038065 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-8gl65"] Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.130762 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c17ad536-947a-46ba-8e65-7278fd29341d" path="/var/lib/kubelet/pods/c17ad536-947a-46ba-8e65-7278fd29341d/volumes" Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.394207 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.438307 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:45 crc kubenswrapper[4756]: I0930 21:14:45.630186 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.049556 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.207867 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-667ph\" (UniqueName: \"kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph\") pod \"1dac2869-2e8d-403d-bfe4-65d5d74ee236\" (UID: \"1dac2869-2e8d-403d-bfe4-65d5d74ee236\") " Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.213629 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph" (OuterVolumeSpecName: "kube-api-access-667ph") pod "1dac2869-2e8d-403d-bfe4-65d5d74ee236" (UID: "1dac2869-2e8d-403d-bfe4-65d5d74ee236"). InnerVolumeSpecName "kube-api-access-667ph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.310781 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-667ph\" (UniqueName: \"kubernetes.io/projected/1dac2869-2e8d-403d-bfe4-65d5d74ee236-kube-api-access-667ph\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.616714 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f034-account-create-n2g2c" event={"ID":"1dac2869-2e8d-403d-bfe4-65d5d74ee236","Type":"ContainerDied","Data":"78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715"} Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.617336 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78d2d30c7f095b7a261b33d65b83e971d6b2f125331416f350d24288e388e715" Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.617061 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rw7vk" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" containerID="cri-o://bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d" gracePeriod=2 Sep 30 21:14:46 crc kubenswrapper[4756]: I0930 21:14:46.616735 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-f034-account-create-n2g2c" Sep 30 21:14:46 crc kubenswrapper[4756]: E0930 21:14:46.854819 4756 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedfe3f82_d6ff_483f_ac9c_85055165ef67.slice/crio-bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d.scope\": RecentStats: unable to find data in memory cache]" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.138739 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.226939 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgjfb\" (UniqueName: \"kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb\") pod \"edfe3f82-d6ff-483f-ac9c-85055165ef67\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.227018 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities\") pod \"edfe3f82-d6ff-483f-ac9c-85055165ef67\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.227216 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content\") pod \"edfe3f82-d6ff-483f-ac9c-85055165ef67\" (UID: \"edfe3f82-d6ff-483f-ac9c-85055165ef67\") " Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.227640 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities" (OuterVolumeSpecName: "utilities") pod "edfe3f82-d6ff-483f-ac9c-85055165ef67" (UID: "edfe3f82-d6ff-483f-ac9c-85055165ef67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.237043 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb" (OuterVolumeSpecName: "kube-api-access-qgjfb") pod "edfe3f82-d6ff-483f-ac9c-85055165ef67" (UID: "edfe3f82-d6ff-483f-ac9c-85055165ef67"). InnerVolumeSpecName "kube-api-access-qgjfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.317261 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edfe3f82-d6ff-483f-ac9c-85055165ef67" (UID: "edfe3f82-d6ff-483f-ac9c-85055165ef67"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.330039 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.330087 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgjfb\" (UniqueName: \"kubernetes.io/projected/edfe3f82-d6ff-483f-ac9c-85055165ef67-kube-api-access-qgjfb\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.330098 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfe3f82-d6ff-483f-ac9c-85055165ef67-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.640688 4756 generic.go:334] "Generic (PLEG): container finished" podID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerID="bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d" exitCode=0 Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.640728 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerDied","Data":"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d"} Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.640755 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw7vk" event={"ID":"edfe3f82-d6ff-483f-ac9c-85055165ef67","Type":"ContainerDied","Data":"c84b50fdc88cc7c4ca4109bd26177b9b9fe99eee609923b796379eee6784a637"} Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.640776 4756 scope.go:117] "RemoveContainer" containerID="bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.640904 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rw7vk" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.676449 4756 scope.go:117] "RemoveContainer" containerID="75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.700234 4756 scope.go:117] "RemoveContainer" containerID="b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.700637 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.712510 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rw7vk"] Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.772868 4756 scope.go:117] "RemoveContainer" containerID="bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d" Sep 30 21:14:47 crc kubenswrapper[4756]: E0930 21:14:47.773453 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d\": container with ID starting with bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d not found: ID does not exist" containerID="bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.773489 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d"} err="failed to get container status \"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d\": rpc error: code = NotFound desc = could not find container \"bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d\": container with ID starting with bec502ba3d82deec77117af3657dd2a9e46932f52a3662bc523888396a3c111d not found: ID does not exist" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.773515 4756 scope.go:117] "RemoveContainer" containerID="75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd" Sep 30 21:14:47 crc kubenswrapper[4756]: E0930 21:14:47.774164 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd\": container with ID starting with 75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd not found: ID does not exist" containerID="75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.774189 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd"} err="failed to get container status \"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd\": rpc error: code = NotFound desc = could not find container \"75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd\": container with ID starting with 75a8eff6122cbb477a07a49b5a383ad53170aa8e1e9a5b5b167cb1771c77f9dd not found: ID does not exist" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.774205 4756 scope.go:117] "RemoveContainer" containerID="b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1" Sep 30 21:14:47 crc kubenswrapper[4756]: E0930 21:14:47.774492 4756 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1\": container with ID starting with b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1 not found: ID does not exist" containerID="b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1" Sep 30 21:14:47 crc kubenswrapper[4756]: I0930 21:14:47.774540 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1"} err="failed to get container status \"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1\": rpc error: code = NotFound desc = could not find container \"b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1\": container with ID starting with b1ee2774b3a117978ceab6a7e7c7ebd0c41c144886a67a950f7dbd7c5279d8a1 not found: ID does not exist" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.789226 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-7vh8h"] Sep 30 21:14:48 crc kubenswrapper[4756]: E0930 21:14:48.789859 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="extract-utilities" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.789873 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="extract-utilities" Sep 30 21:14:48 crc kubenswrapper[4756]: E0930 21:14:48.789896 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dac2869-2e8d-403d-bfe4-65d5d74ee236" containerName="mariadb-account-create" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.789902 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dac2869-2e8d-403d-bfe4-65d5d74ee236" containerName="mariadb-account-create" Sep 30 21:14:48 crc kubenswrapper[4756]: E0930 21:14:48.789916 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="extract-content" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.789922 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="extract-content" Sep 30 21:14:48 crc kubenswrapper[4756]: E0930 21:14:48.789934 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.789941 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.790144 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dac2869-2e8d-403d-bfe4-65d5d74ee236" containerName="mariadb-account-create" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.790159 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" containerName="registry-server" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.790896 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.793801 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.794669 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.798851 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cf5mz" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.811983 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-7vh8h"] Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.867257 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.867318 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsxzj\" (UniqueName: \"kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.867411 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.867608 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.970127 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.970183 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsxzj\" (UniqueName: \"kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.970225 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.970351 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.977904 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.983018 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.985726 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:48 crc kubenswrapper[4756]: I0930 21:14:48.993303 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsxzj\" (UniqueName: \"kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj\") pod \"aodh-db-sync-7vh8h\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:49 crc kubenswrapper[4756]: I0930 21:14:49.114492 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:49 crc kubenswrapper[4756]: I0930 21:14:49.128969 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edfe3f82-d6ff-483f-ac9c-85055165ef67" path="/var/lib/kubelet/pods/edfe3f82-d6ff-483f-ac9c-85055165ef67/volumes" Sep 30 21:14:49 crc kubenswrapper[4756]: I0930 21:14:49.607572 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-7vh8h"] Sep 30 21:14:49 crc kubenswrapper[4756]: I0930 21:14:49.660924 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-7vh8h" event={"ID":"16598b9e-0208-4a53-87f6-1069af4b6c45","Type":"ContainerStarted","Data":"ec4501c434dfe765f6ba6adc79d6e30ea8bb2a2185c9fc8e9c061f33749a315b"} Sep 30 21:14:54 crc kubenswrapper[4756]: I0930 21:14:54.002480 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 21:14:54 crc kubenswrapper[4756]: I0930 21:14:54.051321 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-489a-account-create-jdh7m"] Sep 30 21:14:54 crc kubenswrapper[4756]: I0930 21:14:54.081175 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-489a-account-create-jdh7m"] Sep 30 21:14:54 crc kubenswrapper[4756]: I0930 21:14:54.712857 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-7vh8h" event={"ID":"16598b9e-0208-4a53-87f6-1069af4b6c45","Type":"ContainerStarted","Data":"81e871acea470b2f0f08557c5fe480030d01259a99afbd54dd9e939d91d10216"} Sep 30 21:14:54 crc kubenswrapper[4756]: I0930 21:14:54.730959 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-7vh8h" podStartSLOduration=2.787905588 podStartE2EDuration="6.730940657s" 
podCreationTimestamp="2025-09-30 21:14:48 +0000 UTC" firstStartedPulling="2025-09-30 21:14:49.614434713 +0000 UTC m=+6219.235368190" lastFinishedPulling="2025-09-30 21:14:53.557469782 +0000 UTC m=+6223.178403259" observedRunningTime="2025-09-30 21:14:54.729611772 +0000 UTC m=+6224.350545239" watchObservedRunningTime="2025-09-30 21:14:54.730940657 +0000 UTC m=+6224.351874144" Sep 30 21:14:55 crc kubenswrapper[4756]: I0930 21:14:55.132671 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce7b085-4d4c-4a96-a6fb-98c40eca978e" path="/var/lib/kubelet/pods/1ce7b085-4d4c-4a96-a6fb-98c40eca978e/volumes" Sep 30 21:14:56 crc kubenswrapper[4756]: I0930 21:14:56.738475 4756 generic.go:334] "Generic (PLEG): container finished" podID="16598b9e-0208-4a53-87f6-1069af4b6c45" containerID="81e871acea470b2f0f08557c5fe480030d01259a99afbd54dd9e939d91d10216" exitCode=0 Sep 30 21:14:56 crc kubenswrapper[4756]: I0930 21:14:56.738581 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-7vh8h" event={"ID":"16598b9e-0208-4a53-87f6-1069af4b6c45","Type":"ContainerDied","Data":"81e871acea470b2f0f08557c5fe480030d01259a99afbd54dd9e939d91d10216"} Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.166530 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.269962 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle\") pod \"16598b9e-0208-4a53-87f6-1069af4b6c45\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.270020 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts\") pod \"16598b9e-0208-4a53-87f6-1069af4b6c45\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.270208 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data\") pod \"16598b9e-0208-4a53-87f6-1069af4b6c45\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.270325 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsxzj\" (UniqueName: \"kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj\") pod \"16598b9e-0208-4a53-87f6-1069af4b6c45\" (UID: \"16598b9e-0208-4a53-87f6-1069af4b6c45\") " Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.286578 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts" (OuterVolumeSpecName: "scripts") pod "16598b9e-0208-4a53-87f6-1069af4b6c45" (UID: "16598b9e-0208-4a53-87f6-1069af4b6c45"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.298904 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj" (OuterVolumeSpecName: "kube-api-access-fsxzj") pod "16598b9e-0208-4a53-87f6-1069af4b6c45" (UID: "16598b9e-0208-4a53-87f6-1069af4b6c45"). 
InnerVolumeSpecName "kube-api-access-fsxzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.309381 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16598b9e-0208-4a53-87f6-1069af4b6c45" (UID: "16598b9e-0208-4a53-87f6-1069af4b6c45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.319445 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data" (OuterVolumeSpecName: "config-data") pod "16598b9e-0208-4a53-87f6-1069af4b6c45" (UID: "16598b9e-0208-4a53-87f6-1069af4b6c45"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.373135 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsxzj\" (UniqueName: \"kubernetes.io/projected/16598b9e-0208-4a53-87f6-1069af4b6c45-kube-api-access-fsxzj\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.373190 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.373208 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.373226 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16598b9e-0208-4a53-87f6-1069af4b6c45-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.761870 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-7vh8h" event={"ID":"16598b9e-0208-4a53-87f6-1069af4b6c45","Type":"ContainerDied","Data":"ec4501c434dfe765f6ba6adc79d6e30ea8bb2a2185c9fc8e9c061f33749a315b"} Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.761930 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec4501c434dfe765f6ba6adc79d6e30ea8bb2a2185c9fc8e9c061f33749a315b" Sep 30 21:14:58 crc kubenswrapper[4756]: I0930 21:14:58.762011 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-7vh8h" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.157195 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv"] Sep 30 21:15:00 crc kubenswrapper[4756]: E0930 21:15:00.158134 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16598b9e-0208-4a53-87f6-1069af4b6c45" containerName="aodh-db-sync" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.158152 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="16598b9e-0208-4a53-87f6-1069af4b6c45" containerName="aodh-db-sync" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.158614 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="16598b9e-0208-4a53-87f6-1069af4b6c45" containerName="aodh-db-sync" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.159870 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.166919 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.167105 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.169372 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv"] Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.212463 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvrsz\" (UniqueName: \"kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.212512 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.212594 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.315152 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvrsz\" (UniqueName: \"kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.315230 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.315302 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.316465 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.322180 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.342763 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvrsz\" (UniqueName: \"kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz\") pod \"collect-profiles-29321115-7mxlv\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.479922 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:00 crc kubenswrapper[4756]: I0930 21:15:00.948288 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv"] Sep 30 21:15:01 crc kubenswrapper[4756]: I0930 21:15:01.795808 4756 generic.go:334] "Generic (PLEG): container finished" podID="983f605a-1b2b-4f3c-87c9-a2ef950cae71" containerID="55f1febff9d06e3c9c9942d6817c3d4c249f6c10708777b3e7a5a028ac24d9b4" exitCode=0 Sep 30 21:15:01 crc kubenswrapper[4756]: I0930 21:15:01.795896 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" event={"ID":"983f605a-1b2b-4f3c-87c9-a2ef950cae71","Type":"ContainerDied","Data":"55f1febff9d06e3c9c9942d6817c3d4c249f6c10708777b3e7a5a028ac24d9b4"} Sep 30 21:15:01 crc kubenswrapper[4756]: I0930 21:15:01.796138 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" event={"ID":"983f605a-1b2b-4f3c-87c9-a2ef950cae71","Type":"ContainerStarted","Data":"f1b2c58e3d9ba196ecfd1bd3b1e09b01752f9940cc5698dc7af2ae8d222b150d"} Sep 30 21:15:02 crc kubenswrapper[4756]: I0930 21:15:02.042881 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-vtfrx"] Sep 30 21:15:02 crc kubenswrapper[4756]: I0930 21:15:02.052156 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-vtfrx"] Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.136804 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="515e601c-2353-45a9-8acc-db3f68605dd4" path="/var/lib/kubelet/pods/515e601c-2353-45a9-8acc-db3f68605dd4/volumes" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.278013 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.377304 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvrsz\" (UniqueName: \"kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz\") pod \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.377384 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume\") pod \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.377482 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume\") pod \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\" (UID: \"983f605a-1b2b-4f3c-87c9-a2ef950cae71\") " Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.378918 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume" (OuterVolumeSpecName: "config-volume") pod "983f605a-1b2b-4f3c-87c9-a2ef950cae71" (UID: "983f605a-1b2b-4f3c-87c9-a2ef950cae71"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.392718 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "983f605a-1b2b-4f3c-87c9-a2ef950cae71" (UID: "983f605a-1b2b-4f3c-87c9-a2ef950cae71"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.392753 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz" (OuterVolumeSpecName: "kube-api-access-tvrsz") pod "983f605a-1b2b-4f3c-87c9-a2ef950cae71" (UID: "983f605a-1b2b-4f3c-87c9-a2ef950cae71"). InnerVolumeSpecName "kube-api-access-tvrsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.424364 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Sep 30 21:15:03 crc kubenswrapper[4756]: E0930 21:15:03.424901 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="983f605a-1b2b-4f3c-87c9-a2ef950cae71" containerName="collect-profiles" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.424922 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="983f605a-1b2b-4f3c-87c9-a2ef950cae71" containerName="collect-profiles" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.425218 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="983f605a-1b2b-4f3c-87c9-a2ef950cae71" containerName="collect-profiles" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.435258 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.435392 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.454428 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.454667 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cf5mz" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.454892 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480154 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d6xg\" (UniqueName: \"kubernetes.io/projected/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-kube-api-access-8d6xg\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480225 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-config-data\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480246 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480386 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-scripts\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480479 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvrsz\" (UniqueName: \"kubernetes.io/projected/983f605a-1b2b-4f3c-87c9-a2ef950cae71-kube-api-access-tvrsz\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480492 4756 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/983f605a-1b2b-4f3c-87c9-a2ef950cae71-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.480501 4756 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/983f605a-1b2b-4f3c-87c9-a2ef950cae71-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.581938 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d6xg\" (UniqueName: \"kubernetes.io/projected/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-kube-api-access-8d6xg\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.582250 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-config-data\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 
21:15:03.582274 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.582431 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-scripts\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.586912 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-config-data\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.594928 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-scripts\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.595122 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.603002 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d6xg\" (UniqueName: \"kubernetes.io/projected/e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0-kube-api-access-8d6xg\") pod \"aodh-0\" (UID: \"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0\") " pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.797959 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.818981 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.818980 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-7mxlv" event={"ID":"983f605a-1b2b-4f3c-87c9-a2ef950cae71","Type":"ContainerDied","Data":"f1b2c58e3d9ba196ecfd1bd3b1e09b01752f9940cc5698dc7af2ae8d222b150d"} Sep 30 21:15:03 crc kubenswrapper[4756]: I0930 21:15:03.819130 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1b2c58e3d9ba196ecfd1bd3b1e09b01752f9940cc5698dc7af2ae8d222b150d" Sep 30 21:15:04 crc kubenswrapper[4756]: I0930 21:15:04.316519 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 21:15:04 crc kubenswrapper[4756]: W0930 21:15:04.325334 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7797bdb_6b9f_4e9a_a825_3ddcb81b24a0.slice/crio-5e15f2a5868dd1b3fa5bcda382281296a5f11f280b45b0f76a24537d20d72ad8 WatchSource:0}: Error finding container 5e15f2a5868dd1b3fa5bcda382281296a5f11f280b45b0f76a24537d20d72ad8: Status 404 returned error can't find the container with id 5e15f2a5868dd1b3fa5bcda382281296a5f11f280b45b0f76a24537d20d72ad8 Sep 30 21:15:04 crc kubenswrapper[4756]: I0930 21:15:04.365726 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl"] Sep 30 21:15:04 crc kubenswrapper[4756]: I0930 21:15:04.373381 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-88ddl"] Sep 30 21:15:04 crc kubenswrapper[4756]: I0930 21:15:04.830104 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0","Type":"ContainerStarted","Data":"5e15f2a5868dd1b3fa5bcda382281296a5f11f280b45b0f76a24537d20d72ad8"} Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.129819 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6" path="/var/lib/kubelet/pods/45e26ed6-2f29-41e4-9fd4-c83c0da2b1d6/volumes" Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.847166 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.847545 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-central-agent" containerID="cri-o://31e3716a8f786c73d473d22f67927831afa23e566631c06b431829edfc9978b9" gracePeriod=30 Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.847870 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="proxy-httpd" containerID="cri-o://19daccefd2d2dded94832ed1f222e136406502fbbc1b8f5af4942eefd6d6e05c" gracePeriod=30 Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.847944 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="sg-core" containerID="cri-o://03cf0cc099eb3011a3fc1e92d459a39789636bb1da651f5a098fbd964852bab1" gracePeriod=30 Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.847996 4756 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-notification-agent" containerID="cri-o://1bf3677fc5d17f43143a12610cbe34d009d9324d26daeb768eebe10ce788b66f" gracePeriod=30 Sep 30 21:15:05 crc kubenswrapper[4756]: I0930 21:15:05.870894 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0","Type":"ContainerStarted","Data":"3d0e6c859cf9b205d435745a7b703e1ee3964c5b2a7fa3451c8fced607367061"} Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.881094 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0","Type":"ContainerStarted","Data":"81818c8dc229157b0999c893b5e8b986d8c2b357b62aeff41746717e56af162c"} Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.883951 4756 generic.go:334] "Generic (PLEG): container finished" podID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerID="19daccefd2d2dded94832ed1f222e136406502fbbc1b8f5af4942eefd6d6e05c" exitCode=0 Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.883983 4756 generic.go:334] "Generic (PLEG): container finished" podID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerID="03cf0cc099eb3011a3fc1e92d459a39789636bb1da651f5a098fbd964852bab1" exitCode=2 Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.883994 4756 generic.go:334] "Generic (PLEG): container finished" podID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerID="31e3716a8f786c73d473d22f67927831afa23e566631c06b431829edfc9978b9" exitCode=0 Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.884014 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerDied","Data":"19daccefd2d2dded94832ed1f222e136406502fbbc1b8f5af4942eefd6d6e05c"} Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.884040 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerDied","Data":"03cf0cc099eb3011a3fc1e92d459a39789636bb1da651f5a098fbd964852bab1"} Sep 30 21:15:06 crc kubenswrapper[4756]: I0930 21:15:06.884097 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerDied","Data":"31e3716a8f786c73d473d22f67927831afa23e566631c06b431829edfc9978b9"} Sep 30 21:15:08 crc kubenswrapper[4756]: I0930 21:15:08.915370 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0","Type":"ContainerStarted","Data":"fee9e3832aa894bf9a1c44450706c7d011cc2d6567693a303218f45c2f178e06"} Sep 30 21:15:09 crc kubenswrapper[4756]: I0930 21:15:09.936458 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0","Type":"ContainerStarted","Data":"d813974cb1181bf9bd0e8b01d180cb9df6cbd9e279a1582fe5b0a707f7e3d713"} Sep 30 21:15:09 crc kubenswrapper[4756]: I0930 21:15:09.942717 4756 generic.go:334] "Generic (PLEG): container finished" podID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerID="1bf3677fc5d17f43143a12610cbe34d009d9324d26daeb768eebe10ce788b66f" exitCode=0 Sep 30 21:15:09 crc kubenswrapper[4756]: I0930 21:15:09.942753 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerDied","Data":"1bf3677fc5d17f43143a12610cbe34d009d9324d26daeb768eebe10ce788b66f"} Sep 30 21:15:09 crc kubenswrapper[4756]: I0930 21:15:09.972110 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.764686682 podStartE2EDuration="6.972089048s" podCreationTimestamp="2025-09-30 21:15:03 +0000 UTC" firstStartedPulling="2025-09-30 21:15:04.328729735 +0000 UTC m=+6233.949663202" lastFinishedPulling="2025-09-30 21:15:09.536132091 +0000 UTC m=+6239.157065568" observedRunningTime="2025-09-30 21:15:09.969336866 +0000 UTC m=+6239.590270343" watchObservedRunningTime="2025-09-30 21:15:09.972089048 +0000 UTC m=+6239.593022535" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.057935 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.127400 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.127792 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br78h\" (UniqueName: \"kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.127824 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.127940 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.127984 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.128060 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.128154 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd\") pod \"6b08747b-e36e-4fa3-8306-49231ddaff24\" (UID: \"6b08747b-e36e-4fa3-8306-49231ddaff24\") " Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.130216 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.150121 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.155555 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts" (OuterVolumeSpecName: "scripts") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.175079 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h" (OuterVolumeSpecName: "kube-api-access-br78h") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "kube-api-access-br78h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.196738 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.231522 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.231554 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.231564 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b08747b-e36e-4fa3-8306-49231ddaff24-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.231573 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br78h\" (UniqueName: \"kubernetes.io/projected/6b08747b-e36e-4fa3-8306-49231ddaff24-kube-api-access-br78h\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.231581 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.250242 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.284948 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data" (OuterVolumeSpecName: "config-data") pod "6b08747b-e36e-4fa3-8306-49231ddaff24" (UID: "6b08747b-e36e-4fa3-8306-49231ddaff24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.333824 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.333861 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b08747b-e36e-4fa3-8306-49231ddaff24-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.969333 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.969456 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b08747b-e36e-4fa3-8306-49231ddaff24","Type":"ContainerDied","Data":"fce74d856de88546a0718fc87e0768bf2268d2224d22eb0b4f713597e739ce92"} Sep 30 21:15:10 crc kubenswrapper[4756]: I0930 21:15:10.969562 4756 scope.go:117] "RemoveContainer" containerID="19daccefd2d2dded94832ed1f222e136406502fbbc1b8f5af4942eefd6d6e05c" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.038175 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.048465 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.049028 4756 scope.go:117] "RemoveContainer" containerID="03cf0cc099eb3011a3fc1e92d459a39789636bb1da651f5a098fbd964852bab1" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.059502 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:11 crc kubenswrapper[4756]: E0930 21:15:11.059907 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="sg-core" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.059926 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="sg-core" Sep 30 21:15:11 crc kubenswrapper[4756]: E0930 21:15:11.059949 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="proxy-httpd" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.059955 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="proxy-httpd" Sep 30 21:15:11 crc kubenswrapper[4756]: E0930 21:15:11.059981 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-central-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.059988 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-central-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: E0930 21:15:11.060005 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-notification-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.060012 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-notification-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.060180 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="ceilometer-central-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.060194 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="sg-core" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.060211 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" containerName="proxy-httpd" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.060230 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" 
containerName="ceilometer-notification-agent" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.062113 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.067375 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.067671 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.091985 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.107536 4756 scope.go:117] "RemoveContainer" containerID="1bf3677fc5d17f43143a12610cbe34d009d9324d26daeb768eebe10ce788b66f" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.135418 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b08747b-e36e-4fa3-8306-49231ddaff24" path="/var/lib/kubelet/pods/6b08747b-e36e-4fa3-8306-49231ddaff24/volumes" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.158042 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.158321 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.158499 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7mpn\" (UniqueName: \"kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.158671 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.158884 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.159127 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.159303 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.163126 4756 scope.go:117] "RemoveContainer" containerID="31e3716a8f786c73d473d22f67927831afa23e566631c06b431829edfc9978b9" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.261316 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.261390 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.261877 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7mpn\" (UniqueName: \"kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.261959 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.262236 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.262432 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.262489 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.264083 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.264534 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.265590 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 
21:15:11.267167 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.270117 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.274877 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.277276 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.279046 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7mpn\" (UniqueName: \"kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.279338 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts\") pod \"ceilometer-0\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.403987 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.890029 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:11 crc kubenswrapper[4756]: W0930 21:15:11.894741 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda003f868_fa8c_419d_842b_99b9b9ff31a3.slice/crio-95e5c365ddac163e0249aa04ec639eff4bc686fa572da3ce8c4c4a370a795cfd WatchSource:0}: Error finding container 95e5c365ddac163e0249aa04ec639eff4bc686fa572da3ce8c4c4a370a795cfd: Status 404 returned error can't find the container with id 95e5c365ddac163e0249aa04ec639eff4bc686fa572da3ce8c4c4a370a795cfd Sep 30 21:15:11 crc kubenswrapper[4756]: I0930 21:15:11.979488 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerStarted","Data":"95e5c365ddac163e0249aa04ec639eff4bc686fa572da3ce8c4c4a370a795cfd"} Sep 30 21:15:12 crc kubenswrapper[4756]: I0930 21:15:12.994642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerStarted","Data":"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931"} Sep 30 21:15:14 crc kubenswrapper[4756]: I0930 21:15:14.008323 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerStarted","Data":"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b"} Sep 30 21:15:15 crc kubenswrapper[4756]: I0930 21:15:15.019800 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerStarted","Data":"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd"} Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.034028 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerStarted","Data":"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206"} Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.035448 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.071249 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.734061746 podStartE2EDuration="5.071219002s" podCreationTimestamp="2025-09-30 21:15:11 +0000 UTC" firstStartedPulling="2025-09-30 21:15:11.897473815 +0000 UTC m=+6241.518407292" lastFinishedPulling="2025-09-30 21:15:15.234631031 +0000 UTC m=+6244.855564548" observedRunningTime="2025-09-30 21:15:16.065874133 +0000 UTC m=+6245.686807640" watchObservedRunningTime="2025-09-30 21:15:16.071219002 +0000 UTC m=+6245.692152479" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.527444 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-nmdrj"] Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.528997 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.538821 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-nmdrj"] Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.681103 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t5dk\" (UniqueName: \"kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk\") pod \"manila-db-create-nmdrj\" (UID: \"1ce68e47-019b-472e-8c36-7d3edef58f90\") " pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.783887 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t5dk\" (UniqueName: \"kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk\") pod \"manila-db-create-nmdrj\" (UID: \"1ce68e47-019b-472e-8c36-7d3edef58f90\") " pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.809574 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t5dk\" (UniqueName: \"kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk\") pod \"manila-db-create-nmdrj\" (UID: \"1ce68e47-019b-472e-8c36-7d3edef58f90\") " pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:16 crc kubenswrapper[4756]: I0930 21:15:16.854508 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:17 crc kubenswrapper[4756]: I0930 21:15:17.419694 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-nmdrj"] Sep 30 21:15:17 crc kubenswrapper[4756]: W0930 21:15:17.425733 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ce68e47_019b_472e_8c36_7d3edef58f90.slice/crio-1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695 WatchSource:0}: Error finding container 1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695: Status 404 returned error can't find the container with id 1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695 Sep 30 21:15:18 crc kubenswrapper[4756]: I0930 21:15:18.077096 4756 generic.go:334] "Generic (PLEG): container finished" podID="1ce68e47-019b-472e-8c36-7d3edef58f90" containerID="5eecfb54244f451e4fa19dd8b0bf107f3f6bcc10d8cae05d8259b06d8e1a91be" exitCode=0 Sep 30 21:15:18 crc kubenswrapper[4756]: I0930 21:15:18.077195 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-nmdrj" event={"ID":"1ce68e47-019b-472e-8c36-7d3edef58f90","Type":"ContainerDied","Data":"5eecfb54244f451e4fa19dd8b0bf107f3f6bcc10d8cae05d8259b06d8e1a91be"} Sep 30 21:15:18 crc kubenswrapper[4756]: I0930 21:15:18.077794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-nmdrj" event={"ID":"1ce68e47-019b-472e-8c36-7d3edef58f90","Type":"ContainerStarted","Data":"1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695"} Sep 30 21:15:19 crc kubenswrapper[4756]: I0930 21:15:19.576758 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:19 crc kubenswrapper[4756]: I0930 21:15:19.642449 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t5dk\" (UniqueName: \"kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk\") pod \"1ce68e47-019b-472e-8c36-7d3edef58f90\" (UID: \"1ce68e47-019b-472e-8c36-7d3edef58f90\") " Sep 30 21:15:19 crc kubenswrapper[4756]: I0930 21:15:19.652341 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk" (OuterVolumeSpecName: "kube-api-access-4t5dk") pod "1ce68e47-019b-472e-8c36-7d3edef58f90" (UID: "1ce68e47-019b-472e-8c36-7d3edef58f90"). InnerVolumeSpecName "kube-api-access-4t5dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:19 crc kubenswrapper[4756]: I0930 21:15:19.746526 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t5dk\" (UniqueName: \"kubernetes.io/projected/1ce68e47-019b-472e-8c36-7d3edef58f90-kube-api-access-4t5dk\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:20 crc kubenswrapper[4756]: I0930 21:15:20.103780 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-nmdrj" event={"ID":"1ce68e47-019b-472e-8c36-7d3edef58f90","Type":"ContainerDied","Data":"1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695"} Sep 30 21:15:20 crc kubenswrapper[4756]: I0930 21:15:20.103827 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f3c4863cc5b22ca6d9eaab36e94bc3fb0055e6b5ab1c48652b4ec82b8cfd695" Sep 30 21:15:20 crc kubenswrapper[4756]: I0930 21:15:20.103839 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-nmdrj" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.591506 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-c8e9-account-create-jnb24"] Sep 30 21:15:26 crc kubenswrapper[4756]: E0930 21:15:26.592597 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce68e47-019b-472e-8c36-7d3edef58f90" containerName="mariadb-database-create" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.592613 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce68e47-019b-472e-8c36-7d3edef58f90" containerName="mariadb-database-create" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.592915 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce68e47-019b-472e-8c36-7d3edef58f90" containerName="mariadb-database-create" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.593834 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.597205 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.606425 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-c8e9-account-create-jnb24"] Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.694357 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsnxm\" (UniqueName: \"kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm\") pod \"manila-c8e9-account-create-jnb24\" (UID: \"33741501-84e5-400c-b66f-897f2cbe23f0\") " pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.796424 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsnxm\" (UniqueName: \"kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm\") pod \"manila-c8e9-account-create-jnb24\" (UID: \"33741501-84e5-400c-b66f-897f2cbe23f0\") " pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.819063 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsnxm\" (UniqueName: \"kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm\") pod \"manila-c8e9-account-create-jnb24\" (UID: \"33741501-84e5-400c-b66f-897f2cbe23f0\") " pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:26 crc kubenswrapper[4756]: I0930 21:15:26.921671 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:27 crc kubenswrapper[4756]: I0930 21:15:27.406211 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-c8e9-account-create-jnb24"] Sep 30 21:15:27 crc kubenswrapper[4756]: W0930 21:15:27.415949 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33741501_84e5_400c_b66f_897f2cbe23f0.slice/crio-4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64 WatchSource:0}: Error finding container 4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64: Status 404 returned error can't find the container with id 4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64 Sep 30 21:15:27 crc kubenswrapper[4756]: I0930 21:15:27.548321 4756 scope.go:117] "RemoveContainer" containerID="73f7e5b3073364c3579afe79763dde44a71d4e0a7e699ad746bafeede2a463c7" Sep 30 21:15:27 crc kubenswrapper[4756]: I0930 21:15:27.580705 4756 scope.go:117] "RemoveContainer" containerID="9edaec9102677cb3ad0033bc639eb569160f9fa78284ce28bf43aec76f0e2086" Sep 30 21:15:27 crc kubenswrapper[4756]: I0930 21:15:27.608450 4756 scope.go:117] "RemoveContainer" containerID="ef2b0afd717aa33f6df89fea39ee762a49ac54116ec6479f76c5b4c9d2282417" Sep 30 21:15:27 crc kubenswrapper[4756]: I0930 21:15:27.633365 4756 scope.go:117] "RemoveContainer" containerID="3f42fb4d9bddb3a9bca5ae7139ec584e94997c02854d55611896e919d136f7c3" Sep 30 21:15:28 crc kubenswrapper[4756]: I0930 21:15:28.183269 4756 generic.go:334] "Generic (PLEG): container finished" podID="33741501-84e5-400c-b66f-897f2cbe23f0" containerID="489d5f2eb99d615c80c1c6974746a924688aac53f1fe0777c9e43e8338b68012" exitCode=0 Sep 30 21:15:28 crc 
kubenswrapper[4756]: I0930 21:15:28.183344 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-c8e9-account-create-jnb24" event={"ID":"33741501-84e5-400c-b66f-897f2cbe23f0","Type":"ContainerDied","Data":"489d5f2eb99d615c80c1c6974746a924688aac53f1fe0777c9e43e8338b68012"} Sep 30 21:15:28 crc kubenswrapper[4756]: I0930 21:15:28.183787 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-c8e9-account-create-jnb24" event={"ID":"33741501-84e5-400c-b66f-897f2cbe23f0","Type":"ContainerStarted","Data":"4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64"} Sep 30 21:15:29 crc kubenswrapper[4756]: I0930 21:15:29.640952 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:29 crc kubenswrapper[4756]: I0930 21:15:29.757841 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsnxm\" (UniqueName: \"kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm\") pod \"33741501-84e5-400c-b66f-897f2cbe23f0\" (UID: \"33741501-84e5-400c-b66f-897f2cbe23f0\") " Sep 30 21:15:29 crc kubenswrapper[4756]: I0930 21:15:29.763649 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm" (OuterVolumeSpecName: "kube-api-access-vsnxm") pod "33741501-84e5-400c-b66f-897f2cbe23f0" (UID: "33741501-84e5-400c-b66f-897f2cbe23f0"). InnerVolumeSpecName "kube-api-access-vsnxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:29 crc kubenswrapper[4756]: I0930 21:15:29.860235 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsnxm\" (UniqueName: \"kubernetes.io/projected/33741501-84e5-400c-b66f-897f2cbe23f0-kube-api-access-vsnxm\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:30 crc kubenswrapper[4756]: I0930 21:15:30.201312 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-c8e9-account-create-jnb24" event={"ID":"33741501-84e5-400c-b66f-897f2cbe23f0","Type":"ContainerDied","Data":"4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64"} Sep 30 21:15:30 crc kubenswrapper[4756]: I0930 21:15:30.201349 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a5cf99c3df8ca2d822f3a7aaf43349d9ce491df904607d312dc0008543bfb64" Sep 30 21:15:30 crc kubenswrapper[4756]: I0930 21:15:30.201418 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-c8e9-account-create-jnb24" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.887746 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-wplbx"] Sep 30 21:15:31 crc kubenswrapper[4756]: E0930 21:15:31.888579 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33741501-84e5-400c-b66f-897f2cbe23f0" containerName="mariadb-account-create" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.888595 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="33741501-84e5-400c-b66f-897f2cbe23f0" containerName="mariadb-account-create" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.888856 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="33741501-84e5-400c-b66f-897f2cbe23f0" containerName="mariadb-account-create" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.889808 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.892925 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-gxq45" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.892985 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.908681 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.908852 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.908892 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kstk9\" (UniqueName: \"kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.908917 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:31 crc kubenswrapper[4756]: I0930 21:15:31.921571 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-wplbx"] Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.010864 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.011025 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.011061 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kstk9\" (UniqueName: \"kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.011082 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle\") pod \"manila-db-sync-wplbx\" (UID: 
\"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.017596 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.019701 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.020233 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.031732 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kstk9\" (UniqueName: \"kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9\") pod \"manila-db-sync-wplbx\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:32 crc kubenswrapper[4756]: I0930 21:15:32.211381 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:33 crc kubenswrapper[4756]: W0930 21:15:33.132908 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode980e664_e5cd_44f9_90fd_4078378e83bf.slice/crio-f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485 WatchSource:0}: Error finding container f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485: Status 404 returned error can't find the container with id f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485 Sep 30 21:15:33 crc kubenswrapper[4756]: I0930 21:15:33.134719 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-wplbx"] Sep 30 21:15:33 crc kubenswrapper[4756]: I0930 21:15:33.256586 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wplbx" event={"ID":"e980e664-e5cd-44f9-90fd-4078378e83bf","Type":"ContainerStarted","Data":"f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485"} Sep 30 21:15:39 crc kubenswrapper[4756]: I0930 21:15:39.326985 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wplbx" event={"ID":"e980e664-e5cd-44f9-90fd-4078378e83bf","Type":"ContainerStarted","Data":"b069bad693fa1d97c8a9d99a32f8640798f16c816d5b45cbba695b12684dd219"} Sep 30 21:15:39 crc kubenswrapper[4756]: I0930 21:15:39.362153 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-wplbx" podStartSLOduration=3.374443707 podStartE2EDuration="8.362128065s" podCreationTimestamp="2025-09-30 21:15:31 +0000 UTC" firstStartedPulling="2025-09-30 21:15:33.137502048 +0000 UTC m=+6262.758435555" lastFinishedPulling="2025-09-30 21:15:38.125186436 +0000 UTC m=+6267.746119913" observedRunningTime="2025-09-30 21:15:39.347101824 +0000 UTC 
m=+6268.968035301" watchObservedRunningTime="2025-09-30 21:15:39.362128065 +0000 UTC m=+6268.983061562" Sep 30 21:15:41 crc kubenswrapper[4756]: I0930 21:15:41.349511 4756 generic.go:334] "Generic (PLEG): container finished" podID="e980e664-e5cd-44f9-90fd-4078378e83bf" containerID="b069bad693fa1d97c8a9d99a32f8640798f16c816d5b45cbba695b12684dd219" exitCode=0 Sep 30 21:15:41 crc kubenswrapper[4756]: I0930 21:15:41.349598 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wplbx" event={"ID":"e980e664-e5cd-44f9-90fd-4078378e83bf","Type":"ContainerDied","Data":"b069bad693fa1d97c8a9d99a32f8640798f16c816d5b45cbba695b12684dd219"} Sep 30 21:15:41 crc kubenswrapper[4756]: I0930 21:15:41.409590 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 21:15:42 crc kubenswrapper[4756]: I0930 21:15:42.921748 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.049709 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data\") pod \"e980e664-e5cd-44f9-90fd-4078378e83bf\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.049819 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle\") pod \"e980e664-e5cd-44f9-90fd-4078378e83bf\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.049893 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data\") pod \"e980e664-e5cd-44f9-90fd-4078378e83bf\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.050048 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kstk9\" (UniqueName: \"kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9\") pod \"e980e664-e5cd-44f9-90fd-4078378e83bf\" (UID: \"e980e664-e5cd-44f9-90fd-4078378e83bf\") " Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.058350 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "e980e664-e5cd-44f9-90fd-4078378e83bf" (UID: "e980e664-e5cd-44f9-90fd-4078378e83bf"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.058822 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data" (OuterVolumeSpecName: "config-data") pod "e980e664-e5cd-44f9-90fd-4078378e83bf" (UID: "e980e664-e5cd-44f9-90fd-4078378e83bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.060037 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9" (OuterVolumeSpecName: "kube-api-access-kstk9") pod "e980e664-e5cd-44f9-90fd-4078378e83bf" (UID: "e980e664-e5cd-44f9-90fd-4078378e83bf"). InnerVolumeSpecName "kube-api-access-kstk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.091979 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e980e664-e5cd-44f9-90fd-4078378e83bf" (UID: "e980e664-e5cd-44f9-90fd-4078378e83bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.152571 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.152828 4756 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-job-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.153022 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kstk9\" (UniqueName: \"kubernetes.io/projected/e980e664-e5cd-44f9-90fd-4078378e83bf-kube-api-access-kstk9\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.153107 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e980e664-e5cd-44f9-90fd-4078378e83bf-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.370661 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-wplbx" event={"ID":"e980e664-e5cd-44f9-90fd-4078378e83bf","Type":"ContainerDied","Data":"f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485"} Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.370707 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7514ab4c15db6084f29ef8bc4ba922f36269a3e984f18f978f91370e1c9d485" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.370745 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-wplbx" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.679120 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: E0930 21:15:43.679881 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e980e664-e5cd-44f9-90fd-4078378e83bf" containerName="manila-db-sync" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.679901 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="e980e664-e5cd-44f9-90fd-4078378e83bf" containerName="manila-db-sync" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.680128 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="e980e664-e5cd-44f9-90fd-4078378e83bf" containerName="manila-db-sync" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.681411 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.684094 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.684633 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.684833 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-gxq45" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.685000 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.704507 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.706442 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.713733 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.747842 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.785765 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.799308 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.799662 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.799725 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-ceph\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.799763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4lwz\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-kube-api-access-z4lwz\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.799988 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-scripts\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800104 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800183 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800222 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800290 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9zqm\" (UniqueName: \"kubernetes.io/projected/2e6e6c35-e89e-412f-b025-dcf18e6d2691-kube-api-access-h9zqm\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800338 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800494 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e6e6c35-e89e-412f-b025-dcf18e6d2691-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800568 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800674 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.800750 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-scripts\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.828462 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cdd98bb8f-cmpfq"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.830486 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.878538 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cdd98bb8f-cmpfq"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.903891 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.903962 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.903997 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-scripts\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904023 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904110 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904135 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-ceph\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904154 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4lwz\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-kube-api-access-z4lwz\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904235 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-scripts\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904296 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-config\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: 
\"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904329 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904370 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904390 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904436 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9zqm\" (UniqueName: \"kubernetes.io/projected/2e6e6c35-e89e-412f-b025-dcf18e6d2691-kube-api-access-h9zqm\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904463 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904485 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbd2q\" (UniqueName: \"kubernetes.io/projected/0482d439-b7e2-4502-8d28-85ac4f826cc4-kube-api-access-nbd2q\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904514 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904558 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e6e6c35-e89e-412f-b025-dcf18e6d2691-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904581 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc 
kubenswrapper[4756]: I0930 21:15:43.904599 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-dns-svc\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.904700 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.908257 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-scripts\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.908338 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e6e6c35-e89e-412f-b025-dcf18e6d2691-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.908461 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/34f74470-0f4b-4c5d-8842-b8ba95456f82-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.909604 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.915328 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.915902 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.918049 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.921105 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.922021 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-ceph\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.922203 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.922598 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6e6c35-e89e-412f-b025-dcf18e6d2691-scripts\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.924151 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.924885 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.929037 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.932897 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34f74470-0f4b-4c5d-8842-b8ba95456f82-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.933264 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4lwz\" (UniqueName: \"kubernetes.io/projected/34f74470-0f4b-4c5d-8842-b8ba95456f82-kube-api-access-z4lwz\") pod \"manila-share-share1-0\" (UID: \"34f74470-0f4b-4c5d-8842-b8ba95456f82\") " pod="openstack/manila-share-share1-0" Sep 30 21:15:43 crc kubenswrapper[4756]: I0930 21:15:43.933727 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9zqm\" (UniqueName: \"kubernetes.io/projected/2e6e6c35-e89e-412f-b025-dcf18e6d2691-kube-api-access-h9zqm\") pod \"manila-scheduler-0\" (UID: \"2e6e6c35-e89e-412f-b025-dcf18e6d2691\") " pod="openstack/manila-scheduler-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.005897 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c029f432-5bb2-48e9-9526-533ac7d999c4-etc-machine-id\") 
pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.005951 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-config\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.005984 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-scripts\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006019 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbd2q\" (UniqueName: \"kubernetes.io/projected/0482d439-b7e2-4502-8d28-85ac4f826cc4-kube-api-access-nbd2q\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006039 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006059 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006085 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-dns-svc\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006107 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006128 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtbdn\" (UniqueName: \"kubernetes.io/projected/c029f432-5bb2-48e9-9526-533ac7d999c4-kube-api-access-jtbdn\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006171 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data-custom\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 
21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006207 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.006252 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c029f432-5bb2-48e9-9526-533ac7d999c4-logs\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.007177 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-config\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.008048 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.008624 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-dns-svc\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.009120 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0482d439-b7e2-4502-8d28-85ac4f826cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.009323 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.023666 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbd2q\" (UniqueName: \"kubernetes.io/projected/0482d439-b7e2-4502-8d28-85ac4f826cc4-kube-api-access-nbd2q\") pod \"dnsmasq-dns-7cdd98bb8f-cmpfq\" (UID: \"0482d439-b7e2-4502-8d28-85ac4f826cc4\") " pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.058953 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.109556 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data-custom\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.109669 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.109816 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c029f432-5bb2-48e9-9526-533ac7d999c4-logs\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.109887 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c029f432-5bb2-48e9-9526-533ac7d999c4-etc-machine-id\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.109961 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-scripts\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.110033 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.110113 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtbdn\" (UniqueName: \"kubernetes.io/projected/c029f432-5bb2-48e9-9526-533ac7d999c4-kube-api-access-jtbdn\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.110337 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c029f432-5bb2-48e9-9526-533ac7d999c4-logs\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.110390 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c029f432-5bb2-48e9-9526-533ac7d999c4-etc-machine-id\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.114184 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data-custom\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " 
pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.115601 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.117819 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-scripts\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.119889 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c029f432-5bb2-48e9-9526-533ac7d999c4-config-data\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.126344 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtbdn\" (UniqueName: \"kubernetes.io/projected/c029f432-5bb2-48e9-9526-533ac7d999c4-kube-api-access-jtbdn\") pod \"manila-api-0\" (UID: \"c029f432-5bb2-48e9-9526-533ac7d999c4\") " pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.157073 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.174620 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.571106 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.796549 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 21:15:44 crc kubenswrapper[4756]: W0930 21:15:44.796964 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34f74470_0f4b_4c5d_8842_b8ba95456f82.slice/crio-f3bf873267df8503495a0b4cb6a5d40435bda6fcde235edb1190a8de6f6f3bca WatchSource:0}: Error finding container f3bf873267df8503495a0b4cb6a5d40435bda6fcde235edb1190a8de6f6f3bca: Status 404 returned error can't find the container with id f3bf873267df8503495a0b4cb6a5d40435bda6fcde235edb1190a8de6f6f3bca Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.933229 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 21:15:44 crc kubenswrapper[4756]: I0930 21:15:44.971043 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cdd98bb8f-cmpfq"] Sep 30 21:15:44 crc kubenswrapper[4756]: W0930 21:15:44.991583 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0482d439_b7e2_4502_8d28_85ac4f826cc4.slice/crio-fe8b79c7b095d08d9f6b0f5d704fbaa1f76e814a8fc2464c8e3d1c9383cc2bbe WatchSource:0}: Error finding container fe8b79c7b095d08d9f6b0f5d704fbaa1f76e814a8fc2464c8e3d1c9383cc2bbe: Status 404 returned error can't find the container with id fe8b79c7b095d08d9f6b0f5d704fbaa1f76e814a8fc2464c8e3d1c9383cc2bbe Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.415816 4756 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"c029f432-5bb2-48e9-9526-533ac7d999c4","Type":"ContainerStarted","Data":"0a8f74262d4547e47477a950fa494525717cfe9bca61b0cf8e608027bb9fafee"} Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.418501 4756 generic.go:334] "Generic (PLEG): container finished" podID="0482d439-b7e2-4502-8d28-85ac4f826cc4" containerID="a6f7951f2c5f6c97fc4cd0b80d77cdf654c4dafea5c8c4ee56ac891aeca97837" exitCode=0 Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.418586 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" event={"ID":"0482d439-b7e2-4502-8d28-85ac4f826cc4","Type":"ContainerDied","Data":"a6f7951f2c5f6c97fc4cd0b80d77cdf654c4dafea5c8c4ee56ac891aeca97837"} Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.418638 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" event={"ID":"0482d439-b7e2-4502-8d28-85ac4f826cc4","Type":"ContainerStarted","Data":"fe8b79c7b095d08d9f6b0f5d704fbaa1f76e814a8fc2464c8e3d1c9383cc2bbe"} Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.420342 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"2e6e6c35-e89e-412f-b025-dcf18e6d2691","Type":"ContainerStarted","Data":"6a95ca994bbb5d1adc9ded08e2dbc7e73281638d2bde6658a1aeb9749f904b9f"} Sep 30 21:15:45 crc kubenswrapper[4756]: I0930 21:15:45.422039 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"34f74470-0f4b-4c5d-8842-b8ba95456f82","Type":"ContainerStarted","Data":"f3bf873267df8503495a0b4cb6a5d40435bda6fcde235edb1190a8de6f6f3bca"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.430807 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"c029f432-5bb2-48e9-9526-533ac7d999c4","Type":"ContainerStarted","Data":"b7e010bf287ee338c6861b4ab14421e17692abb148007add15db5098ad69d22a"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.431339 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"c029f432-5bb2-48e9-9526-533ac7d999c4","Type":"ContainerStarted","Data":"6299c17182cca8f923a8a7115baaf33a298fea7943f07a73e7c859b96b8b755d"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.432743 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.457743 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" event={"ID":"0482d439-b7e2-4502-8d28-85ac4f826cc4","Type":"ContainerStarted","Data":"2888fc3dc34dfa51913bddf28670343d76278d14691e91632de1a464f56d68bf"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.459166 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.483781 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"2e6e6c35-e89e-412f-b025-dcf18e6d2691","Type":"ContainerStarted","Data":"b9eedba4100393c8b580150d33a302517aa699ba526077258a13e92aa7e9cd3c"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.483847 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" 
event={"ID":"2e6e6c35-e89e-412f-b025-dcf18e6d2691","Type":"ContainerStarted","Data":"761398fc8abf7ec0644bbb0088bed8e35a577cd087a023f5e5290a824234a506"} Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.493901 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.493882382 podStartE2EDuration="3.493882382s" podCreationTimestamp="2025-09-30 21:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:15:46.454713321 +0000 UTC m=+6276.075646798" watchObservedRunningTime="2025-09-30 21:15:46.493882382 +0000 UTC m=+6276.114815859" Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.498871 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" podStartSLOduration=3.4988493419999998 podStartE2EDuration="3.498849342s" podCreationTimestamp="2025-09-30 21:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:15:46.481858079 +0000 UTC m=+6276.102791566" watchObservedRunningTime="2025-09-30 21:15:46.498849342 +0000 UTC m=+6276.119782819" Sep 30 21:15:46 crc kubenswrapper[4756]: I0930 21:15:46.504388 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.73282144 podStartE2EDuration="3.504368746s" podCreationTimestamp="2025-09-30 21:15:43 +0000 UTC" firstStartedPulling="2025-09-30 21:15:44.585790015 +0000 UTC m=+6274.206723492" lastFinishedPulling="2025-09-30 21:15:45.357337311 +0000 UTC m=+6274.978270798" observedRunningTime="2025-09-30 21:15:46.499874059 +0000 UTC m=+6276.120807536" watchObservedRunningTime="2025-09-30 21:15:46.504368746 +0000 UTC m=+6276.125302233" Sep 30 21:15:51 crc kubenswrapper[4756]: I0930 21:15:51.552179 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"34f74470-0f4b-4c5d-8842-b8ba95456f82","Type":"ContainerStarted","Data":"3be5ace83cf994c9fcc36f5bd6e6439594ca672db8cda674eb40f61a7f88b042"} Sep 30 21:15:51 crc kubenswrapper[4756]: I0930 21:15:51.552794 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"34f74470-0f4b-4c5d-8842-b8ba95456f82","Type":"ContainerStarted","Data":"f2ebb208bee7b607919bf43cf71469e0993c552144a18b2001d8c0f91693a230"} Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.010323 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.060039 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.159714 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cdd98bb8f-cmpfq" Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.197866 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=5.687287517 podStartE2EDuration="11.197839957s" podCreationTimestamp="2025-09-30 21:15:43 +0000 UTC" firstStartedPulling="2025-09-30 21:15:44.800332818 +0000 UTC m=+6274.421266295" lastFinishedPulling="2025-09-30 21:15:50.310885248 +0000 UTC m=+6279.931818735" observedRunningTime="2025-09-30 21:15:51.584210906 
+0000 UTC m=+6281.205144383" watchObservedRunningTime="2025-09-30 21:15:54.197839957 +0000 UTC m=+6283.818773444" Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.253496 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.253726 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="dnsmasq-dns" containerID="cri-o://105b09f331644cb6fab8275f34bb6d8529e61eac7e68fee8646ef634682ac9df" gracePeriod=10 Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.589256 4756 generic.go:334] "Generic (PLEG): container finished" podID="bf306b44-0f89-4344-949c-d24959e4e757" containerID="105b09f331644cb6fab8275f34bb6d8529e61eac7e68fee8646ef634682ac9df" exitCode=0 Sep 30 21:15:54 crc kubenswrapper[4756]: I0930 21:15:54.589307 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" event={"ID":"bf306b44-0f89-4344-949c-d24959e4e757","Type":"ContainerDied","Data":"105b09f331644cb6fab8275f34bb6d8529e61eac7e68fee8646ef634682ac9df"} Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.049247 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.177266 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc\") pod \"bf306b44-0f89-4344-949c-d24959e4e757\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.177335 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb\") pod \"bf306b44-0f89-4344-949c-d24959e4e757\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.177589 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config\") pod \"bf306b44-0f89-4344-949c-d24959e4e757\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.177674 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snprs\" (UniqueName: \"kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs\") pod \"bf306b44-0f89-4344-949c-d24959e4e757\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.177698 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb\") pod \"bf306b44-0f89-4344-949c-d24959e4e757\" (UID: \"bf306b44-0f89-4344-949c-d24959e4e757\") " Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.239751 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs" (OuterVolumeSpecName: "kube-api-access-snprs") pod "bf306b44-0f89-4344-949c-d24959e4e757" (UID: "bf306b44-0f89-4344-949c-d24959e4e757"). InnerVolumeSpecName "kube-api-access-snprs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.330684 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snprs\" (UniqueName: \"kubernetes.io/projected/bf306b44-0f89-4344-949c-d24959e4e757-kube-api-access-snprs\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.338435 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf306b44-0f89-4344-949c-d24959e4e757" (UID: "bf306b44-0f89-4344-949c-d24959e4e757"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.339953 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bf306b44-0f89-4344-949c-d24959e4e757" (UID: "bf306b44-0f89-4344-949c-d24959e4e757"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.345895 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config" (OuterVolumeSpecName: "config") pod "bf306b44-0f89-4344-949c-d24959e4e757" (UID: "bf306b44-0f89-4344-949c-d24959e4e757"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.349178 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bf306b44-0f89-4344-949c-d24959e4e757" (UID: "bf306b44-0f89-4344-949c-d24959e4e757"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.433868 4756 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.434108 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.434184 4756 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.434242 4756 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf306b44-0f89-4344-949c-d24959e4e757-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.604239 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" event={"ID":"bf306b44-0f89-4344-949c-d24959e4e757","Type":"ContainerDied","Data":"0438a81493d16f9527d6dbb5f91efc704c44a7ad0c740a00ba97d4745a0bbd6e"} Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.604551 4756 scope.go:117] "RemoveContainer" containerID="105b09f331644cb6fab8275f34bb6d8529e61eac7e68fee8646ef634682ac9df" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.604339 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f7bc97665-ssfql" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.637582 4756 scope.go:117] "RemoveContainer" containerID="7123d9f29acfbe7bc3e46c418c6b45c32bfae6f3cf5b6b054ba979afbc73b84e" Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.643499 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:15:55 crc kubenswrapper[4756]: I0930 21:15:55.653123 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f7bc97665-ssfql"] Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.057839 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.058132 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-central-agent" containerID="cri-o://eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931" gracePeriod=30 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.058286 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="proxy-httpd" containerID="cri-o://d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206" gracePeriod=30 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.058338 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="sg-core" containerID="cri-o://0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd" gracePeriod=30 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.058383 4756 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-notification-agent" containerID="cri-o://001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b" gracePeriod=30 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.128468 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf306b44-0f89-4344-949c-d24959e4e757" path="/var/lib/kubelet/pods/bf306b44-0f89-4344-949c-d24959e4e757/volumes" Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.626993 4756 generic.go:334] "Generic (PLEG): container finished" podID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerID="d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206" exitCode=0 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.627028 4756 generic.go:334] "Generic (PLEG): container finished" podID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerID="0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd" exitCode=2 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.627038 4756 generic.go:334] "Generic (PLEG): container finished" podID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerID="eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931" exitCode=0 Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.627055 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerDied","Data":"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206"} Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.627080 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerDied","Data":"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd"} Sep 30 21:15:57 crc kubenswrapper[4756]: I0930 21:15:57.627090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerDied","Data":"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931"} Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.135669 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.167646 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.167930 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.167984 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.168018 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.168101 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.168243 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.168272 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7mpn\" (UniqueName: \"kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn\") pod \"a003f868-fa8c-419d-842b-99b9b9ff31a3\" (UID: \"a003f868-fa8c-419d-842b-99b9b9ff31a3\") " Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.168935 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.181900 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts" (OuterVolumeSpecName: "scripts") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.178720 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.191879 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn" (OuterVolumeSpecName: "kube-api-access-x7mpn") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "kube-api-access-x7mpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.232472 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.270312 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7mpn\" (UniqueName: \"kubernetes.io/projected/a003f868-fa8c-419d-842b-99b9b9ff31a3-kube-api-access-x7mpn\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.270343 4756 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.270353 4756 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.270361 4756 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.270369 4756 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a003f868-fa8c-419d-842b-99b9b9ff31a3-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.298887 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.309670 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data" (OuterVolumeSpecName: "config-data") pod "a003f868-fa8c-419d-842b-99b9b9ff31a3" (UID: "a003f868-fa8c-419d-842b-99b9b9ff31a3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.372417 4756 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.372442 4756 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a003f868-fa8c-419d-842b-99b9b9ff31a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.672726 4756 generic.go:334] "Generic (PLEG): container finished" podID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerID="001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b" exitCode=0 Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.672778 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerDied","Data":"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b"} Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.672798 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.673106 4756 scope.go:117] "RemoveContainer" containerID="d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.673090 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a003f868-fa8c-419d-842b-99b9b9ff31a3","Type":"ContainerDied","Data":"95e5c365ddac163e0249aa04ec639eff4bc686fa572da3ce8c4c4a370a795cfd"} Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.713623 4756 scope.go:117] "RemoveContainer" containerID="0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.791503 4756 scope.go:117] "RemoveContainer" containerID="001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.797978 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.813560 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.823595 4756 scope.go:117] "RemoveContainer" containerID="eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.829795 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.830430 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="init" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830465 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="init" Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.830497 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="sg-core" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830508 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="sg-core" Sep 30 21:16:01 crc 
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830540 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-notification-agent"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.830551 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="dnsmasq-dns"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830558 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="dnsmasq-dns"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.830569 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-central-agent"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830577 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-central-agent"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.830596 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="proxy-httpd"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830603 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="proxy-httpd"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830888 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-central-agent"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830913 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="ceilometer-notification-agent"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830928 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="proxy-httpd"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830960 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" containerName="sg-core"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.830975 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf306b44-0f89-4344-949c-d24959e4e757" containerName="dnsmasq-dns"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.833735 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.837340 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.837447 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.844021 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.870707 4756 scope.go:117] "RemoveContainer" containerID="d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.872151 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206\": container with ID starting with d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206 not found: ID does not exist" containerID="d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872202 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206"} err="failed to get container status \"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206\": rpc error: code = NotFound desc = could not find container \"d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206\": container with ID starting with d3d62565d27a0d541febffdf8f27dd392273a59392c381c4d76c928e57971206 not found: ID does not exist"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872236 4756 scope.go:117] "RemoveContainer" containerID="0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.872617 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd\": container with ID starting with 0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd not found: ID does not exist" containerID="0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872649 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd"} err="failed to get container status \"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd\": rpc error: code = NotFound desc = could not find container \"0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd\": container with ID starting with 0b7db4958914a538253f5d318f60bf88c38151098ac4f049aef161e44b1d98cd not found: ID does not exist"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872667 4756 scope.go:117] "RemoveContainer" containerID="001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.872904 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b\": container with ID starting with 001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b not found: ID does not exist" containerID="001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872928 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b"} err="failed to get container status \"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b\": rpc error: code = NotFound desc = could not find container \"001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b\": container with ID starting with 001136e882df2faa71a7c2e5cede26ea88fb0dea0e43c2ff70f699717b4f995b not found: ID does not exist"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.872947 4756 scope.go:117] "RemoveContainer" containerID="eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931"
Sep 30 21:16:01 crc kubenswrapper[4756]: E0930 21:16:01.873196 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931\": container with ID starting with eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931 not found: ID does not exist" containerID="eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.873222 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931"} err="failed to get container status \"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931\": rpc error: code = NotFound desc = could not find container \"eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931\": container with ID starting with eb2a9be47867fa8730ed2c29c7fb01195af7e2ce72d8a067ceec782750e4f931 not found: ID does not exist"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883522 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfcht\" (UniqueName: \"kubernetes.io/projected/903038a7-7ef0-47aa-b14d-3a35f8c1176a-kube-api-access-cfcht\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883561 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-config-data\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883586 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883617 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-scripts\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0"
Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883692 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883763 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-log-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.883778 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-run-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985361 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985533 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-log-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985563 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-run-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985642 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfcht\" (UniqueName: \"kubernetes.io/projected/903038a7-7ef0-47aa-b14d-3a35f8c1176a-kube-api-access-cfcht\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985672 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-config-data\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985701 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.985750 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-scripts\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 
21:16:01.986302 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-log-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.986776 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/903038a7-7ef0-47aa-b14d-3a35f8c1176a-run-httpd\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.990629 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-scripts\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.991281 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.991589 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-config-data\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:01 crc kubenswrapper[4756]: I0930 21:16:01.993591 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/903038a7-7ef0-47aa-b14d-3a35f8c1176a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:02 crc kubenswrapper[4756]: I0930 21:16:02.025571 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfcht\" (UniqueName: \"kubernetes.io/projected/903038a7-7ef0-47aa-b14d-3a35f8c1176a-kube-api-access-cfcht\") pod \"ceilometer-0\" (UID: \"903038a7-7ef0-47aa-b14d-3a35f8c1176a\") " pod="openstack/ceilometer-0" Sep 30 21:16:02 crc kubenswrapper[4756]: I0930 21:16:02.154584 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:16:02 crc kubenswrapper[4756]: I0930 21:16:02.612574 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:16:02 crc kubenswrapper[4756]: W0930 21:16:02.615009 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod903038a7_7ef0_47aa_b14d_3a35f8c1176a.slice/crio-4841c5fa8aec10abeaea76222018f0be36ce1ebb3222930625a2e5bfe19f5896 WatchSource:0}: Error finding container 4841c5fa8aec10abeaea76222018f0be36ce1ebb3222930625a2e5bfe19f5896: Status 404 returned error can't find the container with id 4841c5fa8aec10abeaea76222018f0be36ce1ebb3222930625a2e5bfe19f5896 Sep 30 21:16:02 crc kubenswrapper[4756]: I0930 21:16:02.617868 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 21:16:02 crc kubenswrapper[4756]: I0930 21:16:02.686260 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"903038a7-7ef0-47aa-b14d-3a35f8c1176a","Type":"ContainerStarted","Data":"4841c5fa8aec10abeaea76222018f0be36ce1ebb3222930625a2e5bfe19f5896"} Sep 30 21:16:03 crc kubenswrapper[4756]: I0930 21:16:03.142247 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a003f868-fa8c-419d-842b-99b9b9ff31a3" path="/var/lib/kubelet/pods/a003f868-fa8c-419d-842b-99b9b9ff31a3/volumes" Sep 30 21:16:03 crc kubenswrapper[4756]: I0930 21:16:03.700236 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"903038a7-7ef0-47aa-b14d-3a35f8c1176a","Type":"ContainerStarted","Data":"85e8aa6bcc8132f50819371c2ab20627e046c23aec5f00c20b991a52aaad361e"} Sep 30 21:16:04 crc kubenswrapper[4756]: I0930 21:16:04.710642 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"903038a7-7ef0-47aa-b14d-3a35f8c1176a","Type":"ContainerStarted","Data":"48344c729f485669043990a7051c2897a944fcc63a59dd1984ddb1e69ba740cf"} Sep 30 21:16:05 crc kubenswrapper[4756]: I0930 21:16:05.734561 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"903038a7-7ef0-47aa-b14d-3a35f8c1176a","Type":"ContainerStarted","Data":"bf6913deff4cc91442ce7af9e431630363c7b1afdc9c4c9f643048f36d3f502a"} Sep 30 21:16:05 crc kubenswrapper[4756]: I0930 21:16:05.735901 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Sep 30 21:16:06 crc kubenswrapper[4756]: I0930 21:16:06.003795 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Sep 30 21:16:06 crc kubenswrapper[4756]: I0930 21:16:06.224154 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Sep 30 21:16:06 crc kubenswrapper[4756]: I0930 21:16:06.747693 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"903038a7-7ef0-47aa-b14d-3a35f8c1176a","Type":"ContainerStarted","Data":"4b72b90a44e123ab88ff0e1907032d36df66849b2af5892ac07bf7b68e0b20a2"} Sep 30 21:16:06 crc kubenswrapper[4756]: I0930 21:16:06.748145 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 21:16:06 crc kubenswrapper[4756]: I0930 21:16:06.771675 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.921770896 
podStartE2EDuration="5.771637169s" podCreationTimestamp="2025-09-30 21:16:01 +0000 UTC" firstStartedPulling="2025-09-30 21:16:02.617619816 +0000 UTC m=+6292.238553293" lastFinishedPulling="2025-09-30 21:16:06.467486089 +0000 UTC m=+6296.088419566" observedRunningTime="2025-09-30 21:16:06.763482736 +0000 UTC m=+6296.384416253" watchObservedRunningTime="2025-09-30 21:16:06.771637169 +0000 UTC m=+6296.392570656" Sep 30 21:16:32 crc kubenswrapper[4756]: I0930 21:16:32.163060 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 21:16:41 crc kubenswrapper[4756]: I0930 21:16:41.538584 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:16:41 crc kubenswrapper[4756]: I0930 21:16:41.539149 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:17:11 crc kubenswrapper[4756]: I0930 21:17:11.539436 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:17:11 crc kubenswrapper[4756]: I0930 21:17:11.540143 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.538998 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.539960 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.540040 4756 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.541356 4756 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"} pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.541526 4756 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" containerID="cri-o://6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" gracePeriod=600 Sep 30 21:17:41 crc kubenswrapper[4756]: E0930 21:17:41.680081 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.864047 4756 generic.go:334] "Generic (PLEG): container finished" podID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" exitCode=0 Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.864117 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerDied","Data":"6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"} Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.864187 4756 scope.go:117] "RemoveContainer" containerID="c1aadeaab73b9102a6512a1478fc007196fc8240e5db9d60dd58fca2c5f309cc" Sep 30 21:17:41 crc kubenswrapper[4756]: I0930 21:17:41.866256 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:17:41 crc kubenswrapper[4756]: E0930 21:17:41.867090 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.727842 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6qhq5/must-gather-nqgxs"] Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.730021 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.731856 4756 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-6qhq5"/"default-dockercfg-grk9g" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.731929 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6qhq5"/"kube-root-ca.crt" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.732067 4756 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6qhq5"/"openshift-service-ca.crt" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.737938 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6qhq5/must-gather-nqgxs"] Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.826835 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvzk5\" (UniqueName: \"kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.827178 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.930055 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvzk5\" (UniqueName: \"kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.930142 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.930685 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:45 crc kubenswrapper[4756]: I0930 21:17:45.953247 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvzk5\" (UniqueName: \"kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5\") pod \"must-gather-nqgxs\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") " pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:46 crc kubenswrapper[4756]: I0930 21:17:46.061893 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" Sep 30 21:17:46 crc kubenswrapper[4756]: I0930 21:17:46.618815 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6qhq5/must-gather-nqgxs"] Sep 30 21:17:46 crc kubenswrapper[4756]: I0930 21:17:46.918099 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" event={"ID":"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f","Type":"ContainerStarted","Data":"72e987ac4694a681c67413bd2db50377551c5087d50ffea8d85bef5ed7aadf32"} Sep 30 21:17:47 crc kubenswrapper[4756]: I0930 21:17:47.043498 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-l88p8"] Sep 30 21:17:47 crc kubenswrapper[4756]: I0930 21:17:47.056681 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-l88p8"] Sep 30 21:17:47 crc kubenswrapper[4756]: I0930 21:17:47.139806 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477c082d-c5b0-4c5d-8907-1f12804b3b54" path="/var/lib/kubelet/pods/477c082d-c5b0-4c5d-8907-1f12804b3b54/volumes" Sep 30 21:17:52 crc kubenswrapper[4756]: I0930 21:17:52.991349 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" event={"ID":"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f","Type":"ContainerStarted","Data":"4ff3a708c859ef44cd2a352bad215867fd485dbc5a5d03082cf85a6c7ab0783f"} Sep 30 21:17:52 crc kubenswrapper[4756]: I0930 21:17:52.991998 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" event={"ID":"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f","Type":"ContainerStarted","Data":"a6dc072c33d143d432f4e144d4be2326254d749675486a9cef18af9489f312a8"} Sep 30 21:17:53 crc kubenswrapper[4756]: I0930 21:17:53.020668 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" podStartSLOduration=2.6798187650000003 podStartE2EDuration="8.02065046s" podCreationTimestamp="2025-09-30 21:17:45 +0000 UTC" firstStartedPulling="2025-09-30 21:17:46.622668244 +0000 UTC m=+6396.243601741" lastFinishedPulling="2025-09-30 21:17:51.963499959 +0000 UTC m=+6401.584433436" observedRunningTime="2025-09-30 21:17:53.010915626 +0000 UTC m=+6402.631849113" watchObservedRunningTime="2025-09-30 21:17:53.02065046 +0000 UTC m=+6402.641583937" Sep 30 21:17:55 crc kubenswrapper[4756]: I0930 21:17:55.116509 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:17:55 crc kubenswrapper[4756]: E0930 21:17:55.117526 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.587264 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-hsnd6"] Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.589487 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.667142 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzrzd\" (UniqueName: \"kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.667209 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.769063 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzrzd\" (UniqueName: \"kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.769105 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.769340 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.790151 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzrzd\" (UniqueName: \"kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd\") pod \"crc-debug-hsnd6\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:56 crc kubenswrapper[4756]: I0930 21:17:56.909986 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:17:57 crc kubenswrapper[4756]: I0930 21:17:57.041745 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" event={"ID":"c6d67621-1408-4935-8197-2e8fb6e43a21","Type":"ContainerStarted","Data":"f33788e18b6b6e9c2cf9dccf45d7ec2ac1bd1712102512da805549e56e3e9a3e"} Sep 30 21:18:00 crc kubenswrapper[4756]: I0930 21:18:00.038537 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-b13e-account-create-2j9bs"] Sep 30 21:18:00 crc kubenswrapper[4756]: I0930 21:18:00.046385 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-b13e-account-create-2j9bs"] Sep 30 21:18:01 crc kubenswrapper[4756]: I0930 21:18:01.129987 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="818e347b-2733-4610-8188-39798cbdd1db" path="/var/lib/kubelet/pods/818e347b-2733-4610-8188-39798cbdd1db/volumes" Sep 30 21:18:05 crc kubenswrapper[4756]: I0930 21:18:05.035684 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-6jtcb"] Sep 30 21:18:05 crc kubenswrapper[4756]: I0930 21:18:05.047339 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-6jtcb"] Sep 30 21:18:05 crc kubenswrapper[4756]: I0930 21:18:05.190300 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f905fc-a890-404f-be63-02046a7e6774" path="/var/lib/kubelet/pods/57f905fc-a890-404f-be63-02046a7e6774/volumes" Sep 30 21:18:07 crc kubenswrapper[4756]: I0930 21:18:07.115773 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:18:07 crc kubenswrapper[4756]: E0930 21:18:07.116349 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.208320 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.211550 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.247787 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.310521 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxg7g\" (UniqueName: \"kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.310597 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.310651 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.412387 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxg7g\" (UniqueName: \"kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.412783 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.412839 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.413454 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.413509 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.429610 4756 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zxg7g\" (UniqueName: \"kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g\") pod \"redhat-marketplace-cwcfx\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:08 crc kubenswrapper[4756]: I0930 21:18:08.546487 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:09 crc kubenswrapper[4756]: I0930 21:18:09.156586 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:09 crc kubenswrapper[4756]: W0930 21:18:09.167788 4756 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cfacb12_8368_4b2a_9481_2f6319e80b11.slice/crio-16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf WatchSource:0}: Error finding container 16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf: Status 404 returned error can't find the container with id 16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf Sep 30 21:18:09 crc kubenswrapper[4756]: I0930 21:18:09.180353 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" event={"ID":"c6d67621-1408-4935-8197-2e8fb6e43a21","Type":"ContainerStarted","Data":"37db107c3d1fe50b42bf81a3c511527b38ab952825a82f79eed4282e4c913293"} Sep 30 21:18:09 crc kubenswrapper[4756]: I0930 21:18:09.183433 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerStarted","Data":"16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf"} Sep 30 21:18:09 crc kubenswrapper[4756]: I0930 21:18:09.201564 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" podStartSLOduration=1.822468411 podStartE2EDuration="13.201543582s" podCreationTimestamp="2025-09-30 21:17:56 +0000 UTC" firstStartedPulling="2025-09-30 21:17:56.944874971 +0000 UTC m=+6406.565808448" lastFinishedPulling="2025-09-30 21:18:08.323950142 +0000 UTC m=+6417.944883619" observedRunningTime="2025-09-30 21:18:09.192358413 +0000 UTC m=+6418.813291890" watchObservedRunningTime="2025-09-30 21:18:09.201543582 +0000 UTC m=+6418.822477069" Sep 30 21:18:10 crc kubenswrapper[4756]: I0930 21:18:10.195104 4756 generic.go:334] "Generic (PLEG): container finished" podID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerID="5e91a63c0b0600398f86a99f5dbbe5d84d2e6a01cf872aa18e67dab03e3309a2" exitCode=0 Sep 30 21:18:10 crc kubenswrapper[4756]: I0930 21:18:10.195301 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerDied","Data":"5e91a63c0b0600398f86a99f5dbbe5d84d2e6a01cf872aa18e67dab03e3309a2"} Sep 30 21:18:12 crc kubenswrapper[4756]: I0930 21:18:12.220985 4756 generic.go:334] "Generic (PLEG): container finished" podID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerID="9ed758d16b1255d41153ae3c27a7cda937b22baf1c01adf037d8db17683e714e" exitCode=0 Sep 30 21:18:12 crc kubenswrapper[4756]: I0930 21:18:12.221071 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" 
event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerDied","Data":"9ed758d16b1255d41153ae3c27a7cda937b22baf1c01adf037d8db17683e714e"} Sep 30 21:18:14 crc kubenswrapper[4756]: I0930 21:18:14.251244 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerStarted","Data":"45052e2f1233910663b412affe9b32c0962b6b6edcec6bd92ce13e7063cc6331"} Sep 30 21:18:14 crc kubenswrapper[4756]: I0930 21:18:14.271824 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cwcfx" podStartSLOduration=2.926077324 podStartE2EDuration="6.271793182s" podCreationTimestamp="2025-09-30 21:18:08 +0000 UTC" firstStartedPulling="2025-09-30 21:18:10.198021362 +0000 UTC m=+6419.818954839" lastFinishedPulling="2025-09-30 21:18:13.54373722 +0000 UTC m=+6423.164670697" observedRunningTime="2025-09-30 21:18:14.270640372 +0000 UTC m=+6423.891573859" watchObservedRunningTime="2025-09-30 21:18:14.271793182 +0000 UTC m=+6423.892726659" Sep 30 21:18:17 crc kubenswrapper[4756]: I0930 21:18:17.042916 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-d793-account-create-g2xr5"] Sep 30 21:18:17 crc kubenswrapper[4756]: I0930 21:18:17.049738 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-d793-account-create-g2xr5"] Sep 30 21:18:17 crc kubenswrapper[4756]: I0930 21:18:17.162685 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95434820-0d72-4254-bf80-df546cb03774" path="/var/lib/kubelet/pods/95434820-0d72-4254-bf80-df546cb03774/volumes" Sep 30 21:18:18 crc kubenswrapper[4756]: I0930 21:18:18.546908 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:18 crc kubenswrapper[4756]: I0930 21:18:18.547700 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:18 crc kubenswrapper[4756]: I0930 21:18:18.603800 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:19 crc kubenswrapper[4756]: I0930 21:18:19.361198 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:19 crc kubenswrapper[4756]: I0930 21:18:19.416306 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:20 crc kubenswrapper[4756]: I0930 21:18:20.117026 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:18:20 crc kubenswrapper[4756]: E0930 21:18:20.118231 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:18:21 crc kubenswrapper[4756]: I0930 21:18:21.316006 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cwcfx" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" 
containerName="registry-server" containerID="cri-o://45052e2f1233910663b412affe9b32c0962b6b6edcec6bd92ce13e7063cc6331" gracePeriod=2 Sep 30 21:18:22 crc kubenswrapper[4756]: I0930 21:18:22.333899 4756 generic.go:334] "Generic (PLEG): container finished" podID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerID="45052e2f1233910663b412affe9b32c0962b6b6edcec6bd92ce13e7063cc6331" exitCode=0 Sep 30 21:18:22 crc kubenswrapper[4756]: I0930 21:18:22.334183 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerDied","Data":"45052e2f1233910663b412affe9b32c0962b6b6edcec6bd92ce13e7063cc6331"} Sep 30 21:18:22 crc kubenswrapper[4756]: I0930 21:18:22.334214 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cwcfx" event={"ID":"8cfacb12-8368-4b2a-9481-2f6319e80b11","Type":"ContainerDied","Data":"16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf"} Sep 30 21:18:22 crc kubenswrapper[4756]: I0930 21:18:22.334250 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16da563e7c4909724d94a8a6f1fe18688aceb7239334d6be9d8119765037e0bf" Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.844407 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.969836 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content\") pod \"8cfacb12-8368-4b2a-9481-2f6319e80b11\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.969921 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxg7g\" (UniqueName: \"kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g\") pod \"8cfacb12-8368-4b2a-9481-2f6319e80b11\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.970027 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities\") pod \"8cfacb12-8368-4b2a-9481-2f6319e80b11\" (UID: \"8cfacb12-8368-4b2a-9481-2f6319e80b11\") " Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.970657 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities" (OuterVolumeSpecName: "utilities") pod "8cfacb12-8368-4b2a-9481-2f6319e80b11" (UID: "8cfacb12-8368-4b2a-9481-2f6319e80b11"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:18:23 crc kubenswrapper[4756]: I0930 21:18:23.981771 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g" (OuterVolumeSpecName: "kube-api-access-zxg7g") pod "8cfacb12-8368-4b2a-9481-2f6319e80b11" (UID: "8cfacb12-8368-4b2a-9481-2f6319e80b11"). InnerVolumeSpecName "kube-api-access-zxg7g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.016965 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cfacb12-8368-4b2a-9481-2f6319e80b11" (UID: "8cfacb12-8368-4b2a-9481-2f6319e80b11"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.072710 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.072744 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxg7g\" (UniqueName: \"kubernetes.io/projected/8cfacb12-8368-4b2a-9481-2f6319e80b11-kube-api-access-zxg7g\") on node \"crc\" DevicePath \"\"" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.072753 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cfacb12-8368-4b2a-9481-2f6319e80b11-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.349204 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cwcfx" Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.380509 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:24 crc kubenswrapper[4756]: I0930 21:18:24.391364 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cwcfx"] Sep 30 21:18:25 crc kubenswrapper[4756]: I0930 21:18:25.126754 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" path="/var/lib/kubelet/pods/8cfacb12-8368-4b2a-9481-2f6319e80b11/volumes" Sep 30 21:18:27 crc kubenswrapper[4756]: I0930 21:18:27.875239 4756 scope.go:117] "RemoveContainer" containerID="6ffb02fc015aa1aa321236eb89debcae475a29dbe3d8b2ea6d90673c22761c8c" Sep 30 21:18:27 crc kubenswrapper[4756]: I0930 21:18:27.907167 4756 scope.go:117] "RemoveContainer" containerID="d75a3de1b7613999c5993dcc38d928473addf046a976593a0050014f98ad61e8" Sep 30 21:18:27 crc kubenswrapper[4756]: I0930 21:18:27.982212 4756 scope.go:117] "RemoveContainer" containerID="06b1debc7f1165ddbd7f96ed825eb8e0a062ecf8bc081ec139520e8216cb25e5" Sep 30 21:18:28 crc kubenswrapper[4756]: I0930 21:18:28.057874 4756 scope.go:117] "RemoveContainer" containerID="c82c3c0160979cdad882fbfa9caf6e527c2515ea5483b6563790cbdf85cab886" Sep 30 21:18:32 crc kubenswrapper[4756]: I0930 21:18:32.116239 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:18:32 crc kubenswrapper[4756]: E0930 21:18:32.117861 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:18:45 crc kubenswrapper[4756]: I0930 21:18:45.116742 
4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:18:45 crc kubenswrapper[4756]: E0930 21:18:45.117477 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:18:59 crc kubenswrapper[4756]: I0930 21:18:59.116811 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:18:59 crc kubenswrapper[4756]: E0930 21:18:59.117579 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:19:11 crc kubenswrapper[4756]: I0930 21:19:11.122923 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:19:11 crc kubenswrapper[4756]: E0930 21:19:11.123941 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.072212 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_20597afa-0a2a-4763-a158-89cb5f521be9/init-config-reloader/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.249685 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_20597afa-0a2a-4763-a158-89cb5f521be9/init-config-reloader/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.263106 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_20597afa-0a2a-4763-a158-89cb5f521be9/alertmanager/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.293284 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_20597afa-0a2a-4763-a158-89cb5f521be9/config-reloader/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.461175 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0/aodh-api/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.506908 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0/aodh-evaluator/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.913390 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0/aodh-listener/0.log" Sep 30 21:19:12 crc kubenswrapper[4756]: I0930 21:19:12.988417 4756 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e7797bdb-6b9f-4e9a-a825-3ddcb81b24a0/aodh-notifier/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.099165 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-db-create-kkqjz_ced7e732-18e2-428b-9fdb-15d1d252ad85/mariadb-database-create/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.278081 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-db-sync-7vh8h_16598b9e-0208-4a53-87f6-1069af4b6c45/aodh-db-sync/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.472291 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-f034-account-create-n2g2c_1dac2869-2e8d-403d-bfe4-65d5d74ee236/mariadb-account-create/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.620660 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77676b8946-qhgfw_e19642da-ea64-4716-9b75-96656c381743/barbican-api/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.723700 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77676b8946-qhgfw_e19642da-ea64-4716-9b75-96656c381743/barbican-api-log/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.868731 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-749f4bbfdd-7rqmz_2038f9e9-ae3d-4e56-aaa7-f3346da48330/barbican-keystone-listener/0.log" Sep 30 21:19:13 crc kubenswrapper[4756]: I0930 21:19:13.987051 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-749f4bbfdd-7rqmz_2038f9e9-ae3d-4e56-aaa7-f3346da48330/barbican-keystone-listener-log/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.153443 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-66c7f4f69c-78cb5_094fb104-439b-4882-970f-a3913a9a40de/barbican-worker/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.245363 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-66c7f4f69c-78cb5_094fb104-439b-4882-970f-a3913a9a40de/barbican-worker-log/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.370029 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_903038a7-7ef0-47aa-b14d-3a35f8c1176a/ceilometer-central-agent/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.532008 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_903038a7-7ef0-47aa-b14d-3a35f8c1176a/ceilometer-notification-agent/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.643769 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_903038a7-7ef0-47aa-b14d-3a35f8c1176a/proxy-httpd/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.747942 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_903038a7-7ef0-47aa-b14d-3a35f8c1176a/sg-core/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.876886 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a4a49533-8aea-4c26-a272-8849cd7cab10/cinder-api/0.log" Sep 30 21:19:14 crc kubenswrapper[4756]: I0930 21:19:14.964647 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a4a49533-8aea-4c26-a272-8849cd7cab10/cinder-api-log/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.232014 4756 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a/probe/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.326362 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_313af6b2-d2f9-4bf6-90d5-6de0ada1ca1a/cinder-backup/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.502418 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7c75023e-8962-4ef9-8da3-f2a36ac299d3/probe/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.521705 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7c75023e-8962-4ef9-8da3-f2a36ac299d3/cinder-scheduler/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.672115 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_10f935ba-d208-4e87-ac0f-e80fbb14c586/cinder-volume/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.782617 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_10f935ba-d208-4e87-ac0f-e80fbb14c586/probe/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.815294 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7cdd98bb8f-cmpfq_0482d439-b7e2-4502-8d28-85ac4f826cc4/init/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.974916 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7cdd98bb8f-cmpfq_0482d439-b7e2-4502-8d28-85ac4f826cc4/init/0.log" Sep 30 21:19:15 crc kubenswrapper[4756]: I0930 21:19:15.997931 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7cdd98bb8f-cmpfq_0482d439-b7e2-4502-8d28-85ac4f826cc4/dnsmasq-dns/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.056695 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7fb95e15-f324-4549-959e-2f3edba3bce3/glance-httpd/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.193833 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7fb95e15-f324-4549-959e-2f3edba3bce3/glance-log/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.269931 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_50ebcdc4-6975-4b2f-b08a-b09034017b42/glance-httpd/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.295808 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_50ebcdc4-6975-4b2f-b08a-b09034017b42/glance-log/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.493562 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-1e35-account-create-fr9l6_bb9200a1-8617-478b-a82c-c91af7397ea0/mariadb-account-create/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.560556 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-c557b4776-tw52r_5e2144fa-2a37-4e9c-882b-0cbc001e0206/heat-api/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.728987 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-5f6d774f5b-qwz4z_9755231c-4c74-409a-8d6a-626b63c508a2/heat-cfnapi/0.log" Sep 30 21:19:16 crc kubenswrapper[4756]: I0930 21:19:16.821922 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_heat-db-create-hdf69_d5acee4c-8040-4b34-ae2c-e81a40d3c0da/mariadb-database-create/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.145184 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-db-sync-t9cp4_fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d/heat-db-sync/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.292857 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-67b79b8959-pv8z8_a8308f94-a593-4155-ba5b-c734ca1e7911/heat-engine/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.461964 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bcc99cb89-l9p64_53c3034d-5847-407a-9003-3526fee71366/horizon-log/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.466144 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bcc99cb89-l9p64_53c3034d-5847-407a-9003-3526fee71366/horizon/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.662265 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321101-8h52b_bccb727e-65d8-493b-a3cb-f5f8e535dd57/keystone-cron/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.730465 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-79dfdbf96-s7xnv_f65b915b-51d9-4a4f-aa56-aba336e5f7d4/keystone-api/0.log" Sep 30 21:19:17 crc kubenswrapper[4756]: I0930 21:19:17.896160 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6b29dd70-bdc6-4616-8503-f787d4602e39/kube-state-metrics/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.051943 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_c029f432-5bb2-48e9-9526-533ac7d999c4/manila-api/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.094562 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_c029f432-5bb2-48e9-9526-533ac7d999c4/manila-api-log/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.219966 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-c8e9-account-create-jnb24_33741501-84e5-400c-b66f-897f2cbe23f0/mariadb-account-create/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.272676 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-create-nmdrj_1ce68e47-019b-472e-8c36-7d3edef58f90/mariadb-database-create/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.481583 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-sync-wplbx_e980e664-e5cd-44f9-90fd-4078378e83bf/manila-db-sync/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.623769 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_2e6e6c35-e89e-412f-b025-dcf18e6d2691/probe/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.624081 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_2e6e6c35-e89e-412f-b025-dcf18e6d2691/manila-scheduler/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.755027 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_34f74470-0f4b-4c5d-8842-b8ba95456f82/manila-share/0.log" Sep 30 21:19:18 crc kubenswrapper[4756]: I0930 21:19:18.832228 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_manila-share-share1-0_34f74470-0f4b-4c5d-8842-b8ba95456f82/probe/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.036558 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_68468641-7a26-4d0b-8386-ed0a3d2280f3/adoption/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.299184 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-796b75875-n2t79_ebb9042d-1363-40d6-85e0-c6325b2eac1d/neutron-api/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.357651 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-796b75875-n2t79_ebb9042d-1363-40d6-85e0-c6325b2eac1d/neutron-httpd/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.465385 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_1a9a3913-4c34-4606-817a-31686e00be78/memcached/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.574933 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6e750932-dd9c-4dbc-b42b-b8c17481e118/nova-api-api/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.675143 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6e750932-dd9c-4dbc-b42b-b8c17481e118/nova-api-log/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.784157 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_26ca4516-71bc-4d89-8008-feac31530eba/nova-cell0-conductor-conductor/0.log" Sep 30 21:19:19 crc kubenswrapper[4756]: I0930 21:19:19.976636 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6c2c0ab7-db88-4062-808b-9db9cb1f2264/nova-cell1-conductor-conductor/0.log" Sep 30 21:19:20 crc kubenswrapper[4756]: I0930 21:19:20.075526 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a23cdb4e-60a7-4298-824c-29f6ccf5d502/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 21:19:20 crc kubenswrapper[4756]: I0930 21:19:20.328921 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7fa0d9a9-bf84-42ba-980e-0ca8010de3e7/nova-metadata-log/0.log" Sep 30 21:19:20 crc kubenswrapper[4756]: I0930 21:19:20.524511 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7fa0d9a9-bf84-42ba-980e-0ca8010de3e7/nova-metadata-metadata/0.log" Sep 30 21:19:20 crc kubenswrapper[4756]: I0930 21:19:20.650875 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cd2a9763-32c6-4c38-8565-42dea0aff93c/nova-scheduler-scheduler/0.log" Sep 30 21:19:20 crc kubenswrapper[4756]: I0930 21:19:20.796130 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-66b5c6cccd-rcf58_d38fa27e-0974-4f1d-922c-43f94ab027df/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.008310 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-66b5c6cccd-rcf58_d38fa27e-0974-4f1d-922c-43f94ab027df/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.025352 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-66b5c6cccd-rcf58_d38fa27e-0974-4f1d-922c-43f94ab027df/octavia-api-provider-agent/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.108210 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-api-66b5c6cccd-rcf58_d38fa27e-0974-4f1d-922c-43f94ab027df/octavia-api/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.215831 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-2m9fn_cf3ed40b-5636-4917-82bc-b9c60bff311f/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.354867 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-2m9fn_cf3ed40b-5636-4917-82bc-b9c60bff311f/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.403053 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-2m9fn_cf3ed40b-5636-4917-82bc-b9c60bff311f/octavia-db-sync/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.453027 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-h4j74_1be3323c-dbb7-42fe-a95d-56b44b70fc98/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.639806 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-h4j74_1be3323c-dbb7-42fe-a95d-56b44b70fc98/octavia-healthmanager/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.645958 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-h4j74_1be3323c-dbb7-42fe-a95d-56b44b70fc98/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.714919 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qzvbg_1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55/init/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.918197 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qzvbg_1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55/octavia-housekeeping/0.log" Sep 30 21:19:21 crc kubenswrapper[4756]: I0930 21:19:21.951272 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qzvbg_1a3d7e55-8c6d-4b7e-8c52-8e9c1a2a5e55/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.009981 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-dwt7s_3091d122-9691-440c-a0a1-ed14393f46fc/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.120739 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-dwt7s_3091d122-9691-440c-a0a1-ed14393f46fc/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.151347 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-dwt7s_3091d122-9691-440c-a0a1-ed14393f46fc/octavia-amphora-httpd/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.262861 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-zjxht_8e308d95-d34a-4264-86a0-d4bad47490de/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.419255 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-zjxht_8e308d95-d34a-4264-86a0-d4bad47490de/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.432200 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-zjxht_8e308d95-d34a-4264-86a0-d4bad47490de/octavia-rsyslog/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.511652 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-worker-wnndt_3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.624167 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-wnndt_3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9/init/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.734589 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_be947271-1bee-445b-be89-945f5ffe659b/mysql-bootstrap/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.750161 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-wnndt_3c1ed8ea-284a-4ae3-b08d-e167a9caf5f9/octavia-worker/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.929424 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_be947271-1bee-445b-be89-945f5ffe659b/mysql-bootstrap/0.log" Sep 30 21:19:22 crc kubenswrapper[4756]: I0930 21:19:22.963523 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_be947271-1bee-445b-be89-945f5ffe659b/galera/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.047363 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c86195f6-b0ce-4e59-81e8-25d5a978d2b5/mysql-bootstrap/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.171910 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c86195f6-b0ce-4e59-81e8-25d5a978d2b5/mysql-bootstrap/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.224253 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c86195f6-b0ce-4e59-81e8-25d5a978d2b5/galera/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.246933 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_bc558c9b-5ec2-4fd2-9d2b-2a7d5e3e59b1/openstackclient/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.419630 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-gltp7_5b6960d9-1e9a-48e5-832a-d740084ceede/ovn-controller/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.461242 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zwrsz_0c0bde9d-e4ed-466c-8ebd-7383eacd6157/openstack-network-exporter/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.650990 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2nzxh_55f82193-72c2-4f43-b9bc-3e3f9b832b05/ovsdb-server-init/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.824252 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2nzxh_55f82193-72c2-4f43-b9bc-3e3f9b832b05/ovsdb-server-init/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.841861 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2nzxh_55f82193-72c2-4f43-b9bc-3e3f9b832b05/ovsdb-server/0.log" Sep 30 21:19:23 crc kubenswrapper[4756]: I0930 21:19:23.845332 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2nzxh_55f82193-72c2-4f43-b9bc-3e3f9b832b05/ovs-vswitchd/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.070266 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-copy-data_4f059cb9-0352-4976-b189-a13a57de725e/adoption/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.082789 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e184d685-977f-412b-b88f-1aec1b3c53d3/openstack-network-exporter/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.217261 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e184d685-977f-412b-b88f-1aec1b3c53d3/ovn-northd/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.321852 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e3a83234-34b6-4c4c-8150-e85b357a7249/openstack-network-exporter/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.436451 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e3a83234-34b6-4c4c-8150-e85b357a7249/ovsdbserver-nb/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.595534 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_97be13c3-6dba-47c2-91dc-b14cdc6c45dc/ovsdbserver-nb/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.596893 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_97be13c3-6dba-47c2-91dc-b14cdc6c45dc/openstack-network-exporter/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.775674 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_63cc3cb7-a06a-43d3-8963-11254e723416/openstack-network-exporter/0.log" Sep 30 21:19:24 crc kubenswrapper[4756]: I0930 21:19:24.792867 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_63cc3cb7-a06a-43d3-8963-11254e723416/ovsdbserver-nb/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.061606 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-2m9fn"] Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.069226 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-2m9fn"] Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.116772 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:19:25 crc kubenswrapper[4756]: E0930 21:19:25.117056 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.150890 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf3ed40b-5636-4917-82bc-b9c60bff311f" path="/var/lib/kubelet/pods/cf3ed40b-5636-4917-82bc-b9c60bff311f/volumes" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.303418 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a46c3457-8a08-4958-9b56-1583c6d5dc99/openstack-network-exporter/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.361066 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a46c3457-8a08-4958-9b56-1583c6d5dc99/ovsdbserver-sb/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: 
I0930 21:19:25.447975 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_1765ebd7-500e-4d06-8354-7121b40002ad/openstack-network-exporter/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.490623 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_1765ebd7-500e-4d06-8354-7121b40002ad/ovsdbserver-sb/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.571503 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_9d1172a6-25c7-4141-a33a-0cb6e7532927/openstack-network-exporter/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.704068 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_9d1172a6-25c7-4141-a33a-0cb6e7532927/ovsdbserver-sb/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.820878 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f6fcc4bd8-b42zh_0fd50551-db2a-414f-9c25-712ff7b4ec70/placement-api/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.854201 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f6fcc4bd8-b42zh_0fd50551-db2a-414f-9c25-712ff7b4ec70/placement-log/0.log" Sep 30 21:19:25 crc kubenswrapper[4756]: I0930 21:19:25.989718 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_c79a9792-587e-498a-9506-be25271a0884/init-config-reloader/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.223323 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_c79a9792-587e-498a-9506-be25271a0884/prometheus/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.228313 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_c79a9792-587e-498a-9506-be25271a0884/init-config-reloader/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.233677 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_c79a9792-587e-498a-9506-be25271a0884/config-reloader/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.279918 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_c79a9792-587e-498a-9506-be25271a0884/thanos-sidecar/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.428875 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_35150b99-e380-48a6-beae-db98734c02e2/setup-container/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.626542 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_35150b99-e380-48a6-beae-db98734c02e2/rabbitmq/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.682683 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_35150b99-e380-48a6-beae-db98734c02e2/setup-container/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.722074 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_18633e6c-8de8-42d7-b97f-cefca7bcb2b5/setup-container/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 21:19:26.886641 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_18633e6c-8de8-42d7-b97f-cefca7bcb2b5/setup-container/0.log" Sep 30 21:19:26 crc kubenswrapper[4756]: I0930 
21:19:26.938924 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_18633e6c-8de8-42d7-b97f-cefca7bcb2b5/rabbitmq/0.log" Sep 30 21:19:28 crc kubenswrapper[4756]: I0930 21:19:28.247864 4756 scope.go:117] "RemoveContainer" containerID="6d3776eac851b6ba5a88a3ddd9930a8e3c663b99917a9447bac10fdbd018b63c" Sep 30 21:19:28 crc kubenswrapper[4756]: I0930 21:19:28.286946 4756 scope.go:117] "RemoveContainer" containerID="86facede1871635eb01714c5f8eff8ee47c34beb142ccd161bd2d3856da9420a" Sep 30 21:19:38 crc kubenswrapper[4756]: I0930 21:19:38.116706 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:19:38 crc kubenswrapper[4756]: E0930 21:19:38.117448 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:19:49 crc kubenswrapper[4756]: I0930 21:19:49.116659 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:19:49 crc kubenswrapper[4756]: E0930 21:19:49.117572 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:02 crc kubenswrapper[4756]: I0930 21:20:02.119924 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:20:02 crc kubenswrapper[4756]: E0930 21:20:02.121859 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:10 crc kubenswrapper[4756]: I0930 21:20:10.459293 4756 generic.go:334] "Generic (PLEG): container finished" podID="c6d67621-1408-4935-8197-2e8fb6e43a21" containerID="37db107c3d1fe50b42bf81a3c511527b38ab952825a82f79eed4282e4c913293" exitCode=0 Sep 30 21:20:10 crc kubenswrapper[4756]: I0930 21:20:10.459372 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" event={"ID":"c6d67621-1408-4935-8197-2e8fb6e43a21","Type":"ContainerDied","Data":"37db107c3d1fe50b42bf81a3c511527b38ab952825a82f79eed4282e4c913293"} Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.609017 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.645848 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-hsnd6"] Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.655102 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-hsnd6"] Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.730618 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzrzd\" (UniqueName: \"kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd\") pod \"c6d67621-1408-4935-8197-2e8fb6e43a21\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.730778 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host\") pod \"c6d67621-1408-4935-8197-2e8fb6e43a21\" (UID: \"c6d67621-1408-4935-8197-2e8fb6e43a21\") " Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.730863 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host" (OuterVolumeSpecName: "host") pod "c6d67621-1408-4935-8197-2e8fb6e43a21" (UID: "c6d67621-1408-4935-8197-2e8fb6e43a21"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.731494 4756 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c6d67621-1408-4935-8197-2e8fb6e43a21-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.738265 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd" (OuterVolumeSpecName: "kube-api-access-kzrzd") pod "c6d67621-1408-4935-8197-2e8fb6e43a21" (UID: "c6d67621-1408-4935-8197-2e8fb6e43a21"). InnerVolumeSpecName "kube-api-access-kzrzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:20:11 crc kubenswrapper[4756]: I0930 21:20:11.834669 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzrzd\" (UniqueName: \"kubernetes.io/projected/c6d67621-1408-4935-8197-2e8fb6e43a21-kube-api-access-kzrzd\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.485744 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f33788e18b6b6e9c2cf9dccf45d7ec2ac1bd1712102512da805549e56e3e9a3e" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.485784 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-hsnd6" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.812875 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-b6pgn"] Sep 30 21:20:12 crc kubenswrapper[4756]: E0930 21:20:12.813948 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="registry-server" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.813967 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="registry-server" Sep 30 21:20:12 crc kubenswrapper[4756]: E0930 21:20:12.813980 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="extract-content" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.813987 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="extract-content" Sep 30 21:20:12 crc kubenswrapper[4756]: E0930 21:20:12.814021 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6d67621-1408-4935-8197-2e8fb6e43a21" containerName="container-00" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.814029 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6d67621-1408-4935-8197-2e8fb6e43a21" containerName="container-00" Sep 30 21:20:12 crc kubenswrapper[4756]: E0930 21:20:12.814046 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="extract-utilities" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.814054 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="extract-utilities" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.814320 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6d67621-1408-4935-8197-2e8fb6e43a21" containerName="container-00" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.814331 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cfacb12-8368-4b2a-9481-2f6319e80b11" containerName="registry-server" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.815352 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.959388 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:12 crc kubenswrapper[4756]: I0930 21:20:12.959821 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbksc\" (UniqueName: \"kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.061910 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbksc\" (UniqueName: \"kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.062119 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.062297 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.100846 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbksc\" (UniqueName: \"kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc\") pod \"crc-debug-b6pgn\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.115713 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:20:13 crc kubenswrapper[4756]: E0930 21:20:13.115957 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.134555 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.138080 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6d67621-1408-4935-8197-2e8fb6e43a21" path="/var/lib/kubelet/pods/c6d67621-1408-4935-8197-2e8fb6e43a21/volumes" Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.498212 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" event={"ID":"50092538-3d1c-485a-b6b1-f460081baee6","Type":"ContainerStarted","Data":"31dcf8b04162474ab1425410b787e3355eafa6c7e8055b9346093953d102809d"} Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.498744 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" event={"ID":"50092538-3d1c-485a-b6b1-f460081baee6","Type":"ContainerStarted","Data":"c1d3576adaf58499d30f567ad7e187258642c28781f7da106b5df92793d3b4b8"} Sep 30 21:20:13 crc kubenswrapper[4756]: I0930 21:20:13.516277 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" podStartSLOduration=1.516260059 podStartE2EDuration="1.516260059s" podCreationTimestamp="2025-09-30 21:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:20:13.511005782 +0000 UTC m=+6543.131939269" watchObservedRunningTime="2025-09-30 21:20:13.516260059 +0000 UTC m=+6543.137193556" Sep 30 21:20:14 crc kubenswrapper[4756]: I0930 21:20:14.507303 4756 generic.go:334] "Generic (PLEG): container finished" podID="50092538-3d1c-485a-b6b1-f460081baee6" containerID="31dcf8b04162474ab1425410b787e3355eafa6c7e8055b9346093953d102809d" exitCode=0 Sep 30 21:20:14 crc kubenswrapper[4756]: I0930 21:20:14.507613 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" event={"ID":"50092538-3d1c-485a-b6b1-f460081baee6","Type":"ContainerDied","Data":"31dcf8b04162474ab1425410b787e3355eafa6c7e8055b9346093953d102809d"} Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.612789 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.709255 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host\") pod \"50092538-3d1c-485a-b6b1-f460081baee6\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.709360 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host" (OuterVolumeSpecName: "host") pod "50092538-3d1c-485a-b6b1-f460081baee6" (UID: "50092538-3d1c-485a-b6b1-f460081baee6"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.709462 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbksc\" (UniqueName: \"kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc\") pod \"50092538-3d1c-485a-b6b1-f460081baee6\" (UID: \"50092538-3d1c-485a-b6b1-f460081baee6\") " Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.710218 4756 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50092538-3d1c-485a-b6b1-f460081baee6-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.718565 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc" (OuterVolumeSpecName: "kube-api-access-gbksc") pod "50092538-3d1c-485a-b6b1-f460081baee6" (UID: "50092538-3d1c-485a-b6b1-f460081baee6"). InnerVolumeSpecName "kube-api-access-gbksc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:20:15 crc kubenswrapper[4756]: I0930 21:20:15.812449 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbksc\" (UniqueName: \"kubernetes.io/projected/50092538-3d1c-485a-b6b1-f460081baee6-kube-api-access-gbksc\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:16 crc kubenswrapper[4756]: I0930 21:20:16.537044 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" event={"ID":"50092538-3d1c-485a-b6b1-f460081baee6","Type":"ContainerDied","Data":"c1d3576adaf58499d30f567ad7e187258642c28781f7da106b5df92793d3b4b8"} Sep 30 21:20:16 crc kubenswrapper[4756]: I0930 21:20:16.537086 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1d3576adaf58499d30f567ad7e187258642c28781f7da106b5df92793d3b4b8" Sep 30 21:20:16 crc kubenswrapper[4756]: I0930 21:20:16.537138 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-b6pgn" Sep 30 21:20:22 crc kubenswrapper[4756]: I0930 21:20:22.671292 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-b6pgn"] Sep 30 21:20:22 crc kubenswrapper[4756]: I0930 21:20:22.678592 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-b6pgn"] Sep 30 21:20:23 crc kubenswrapper[4756]: I0930 21:20:23.133301 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50092538-3d1c-485a-b6b1-f460081baee6" path="/var/lib/kubelet/pods/50092538-3d1c-485a-b6b1-f460081baee6/volumes" Sep 30 21:20:23 crc kubenswrapper[4756]: I0930 21:20:23.904079 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-mpwjg"] Sep 30 21:20:23 crc kubenswrapper[4756]: E0930 21:20:23.904846 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50092538-3d1c-485a-b6b1-f460081baee6" containerName="container-00" Sep 30 21:20:23 crc kubenswrapper[4756]: I0930 21:20:23.904860 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="50092538-3d1c-485a-b6b1-f460081baee6" containerName="container-00" Sep 30 21:20:23 crc kubenswrapper[4756]: I0930 21:20:23.905106 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="50092538-3d1c-485a-b6b1-f460081baee6" containerName="container-00" Sep 30 21:20:23 crc kubenswrapper[4756]: I0930 21:20:23.905981 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.084050 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxvkb\" (UniqueName: \"kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.084198 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.186638 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxvkb\" (UniqueName: \"kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.186877 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.187006 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: 
I0930 21:20:24.207285 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxvkb\" (UniqueName: \"kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb\") pod \"crc-debug-mpwjg\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.230251 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.644515 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" event={"ID":"b484e073-737b-4843-9656-d3361e2cef9e","Type":"ContainerStarted","Data":"fd082400062e4581873030301c882746a4efabbdec914d2fc480cc5471299613"} Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.644841 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" event={"ID":"b484e073-737b-4843-9656-d3361e2cef9e","Type":"ContainerStarted","Data":"d24a9e82bdb2b510416b8a6382373706a4f6f81bd64ba443a6f7d6bbdc1c7c53"} Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.687411 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-mpwjg"] Sep 30 21:20:24 crc kubenswrapper[4756]: I0930 21:20:24.697056 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6qhq5/crc-debug-mpwjg"] Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.654420 4756 generic.go:334] "Generic (PLEG): container finished" podID="b484e073-737b-4843-9656-d3361e2cef9e" containerID="fd082400062e4581873030301c882746a4efabbdec914d2fc480cc5471299613" exitCode=0 Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.768924 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.947773 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host\") pod \"b484e073-737b-4843-9656-d3361e2cef9e\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.947908 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host" (OuterVolumeSpecName: "host") pod "b484e073-737b-4843-9656-d3361e2cef9e" (UID: "b484e073-737b-4843-9656-d3361e2cef9e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.948028 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxvkb\" (UniqueName: \"kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb\") pod \"b484e073-737b-4843-9656-d3361e2cef9e\" (UID: \"b484e073-737b-4843-9656-d3361e2cef9e\") " Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.948746 4756 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b484e073-737b-4843-9656-d3361e2cef9e-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:25 crc kubenswrapper[4756]: I0930 21:20:25.953151 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb" (OuterVolumeSpecName: "kube-api-access-zxvkb") pod "b484e073-737b-4843-9656-d3361e2cef9e" (UID: "b484e073-737b-4843-9656-d3361e2cef9e"). InnerVolumeSpecName "kube-api-access-zxvkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.050923 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxvkb\" (UniqueName: \"kubernetes.io/projected/b484e073-737b-4843-9656-d3361e2cef9e-kube-api-access-zxvkb\") on node \"crc\" DevicePath \"\"" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.272182 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/util/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.446986 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/util/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.474289 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/pull/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.521428 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/pull/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.672558 4756 scope.go:117] "RemoveContainer" containerID="fd082400062e4581873030301c882746a4efabbdec914d2fc480cc5471299613" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.672720 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6qhq5/crc-debug-mpwjg" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.713662 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/pull/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.741897 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/util/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.742888 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_53b06609af47042a6f8e2d87c80942a2e7fa4642ad364cacb64cd797b3jkrwf_82fec36a-fd50-4bc6-ab33-1ce3e87bf1d7/extract/0.log" Sep 30 21:20:26 crc kubenswrapper[4756]: I0930 21:20:26.899571 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-7cv4w_31f39b27-7fcc-453c-8ffd-3c5d7b12aed2/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.035591 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-klbxz_7871ae32-4b8d-4902-af0f-152e38693ee5/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.048340 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-7cv4w_31f39b27-7fcc-453c-8ffd-3c5d7b12aed2/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.128506 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b484e073-737b-4843-9656-d3361e2cef9e" path="/var/lib/kubelet/pods/b484e073-737b-4843-9656-d3361e2cef9e/volumes" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.197471 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-klbxz_7871ae32-4b8d-4902-af0f-152e38693ee5/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.246015 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-ldpdd_7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.265980 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-ldpdd_7a303d5c-39f9-4cb5-94ee-9b126d8cc4f3/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.377742 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-xg9fd_a5234caa-5273-4074-837e-30ee316c5d73/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.565552 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-gz6p9_869ee50e-ceab-400c-a7b1-b76b81b29bbc/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.570088 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-xg9fd_a5234caa-5273-4074-837e-30ee316c5d73/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.638386 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-gz6p9_869ee50e-ceab-400c-a7b1-b76b81b29bbc/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.737145 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-6rqqt_18189d34-2947-4744-b2ca-e970a456289f/kube-rbac-proxy/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.791764 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-6rqqt_18189d34-2947-4744-b2ca-e970a456289f/manager/0.log" Sep 30 21:20:27 crc kubenswrapper[4756]: I0930 21:20:27.945362 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-r8xqm_a76989a9-9056-4bc1-9e9c-5d7513a3e81b/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.114834 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-k5ccf_2c4a76fe-4543-43e7-9e7e-7e3b4c149e46/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.116076 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:20:28 crc kubenswrapper[4756]: E0930 21:20:28.116312 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.168520 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-k5ccf_2c4a76fe-4543-43e7-9e7e-7e3b4c149e46/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.208369 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-r8xqm_a76989a9-9056-4bc1-9e9c-5d7513a3e81b/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.287811 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-c46nt_511b7524-d136-4131-9949-72c4f46a60d8/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.411468 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-c46nt_511b7524-d136-4131-9949-72c4f46a60d8/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.435386 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-54d4j_9ffa6269-c199-4388-93a5-cdb131bf656d/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.509542 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-54d4j_9ffa6269-c199-4388-93a5-cdb131bf656d/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.654734 4756 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-h8tt7_e42a9d09-d1ac-4d6c-853a-bc9f472e2b91/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.667274 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-h8tt7_e42a9d09-d1ac-4d6c-853a-bc9f472e2b91/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.800004 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b7v6v_081b46ad-0352-4dbf-8fde-1e064cad9b24/kube-rbac-proxy/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.886952 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b7v6v_081b46ad-0352-4dbf-8fde-1e064cad9b24/manager/0.log" Sep 30 21:20:28 crc kubenswrapper[4756]: I0930 21:20:28.925485 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-m2fvs_3dc3c221-bd19-4634-8979-2c5f843c9b06/kube-rbac-proxy/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.103573 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-rsz5s_860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a/kube-rbac-proxy/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.141019 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-m2fvs_3dc3c221-bd19-4634-8979-2c5f843c9b06/manager/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.189942 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-rsz5s_860ff2da-8a1c-49a2-8f9b-e0e5fa8a6b2a/manager/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.353863 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8c6lblw_89223991-b3cb-47fb-9efc-ac95917de547/manager/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.359340 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8c6lblw_89223991-b3cb-47fb-9efc-ac95917de547/kube-rbac-proxy/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.488557 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-98d66ccb9-2xh6m_fbc8173b-fdca-44d0-af4e-5f2f106bd031/kube-rbac-proxy/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.654475 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-67dd46bc9f-7pbdn_27ec2c4e-9af3-46a8-a478-178875319a55/kube-rbac-proxy/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.812274 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-c57rv_32314f62-22f9-45ae-9434-8345b30e8761/registry-server/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.860010 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-67dd46bc9f-7pbdn_27ec2c4e-9af3-46a8-a478-178875319a55/operator/0.log" Sep 30 21:20:29 crc kubenswrapper[4756]: I0930 21:20:29.989540 4756 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lc6f7_c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1/kube-rbac-proxy/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.113790 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j8wpg_9f9592fa-373c-436e-ab8a-96745a6c3f2d/kube-rbac-proxy/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.177917 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lc6f7_c1408e4f-3a38-4b09-8418-a9a3ab3ca3b1/manager/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.250664 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j8wpg_9f9592fa-373c-436e-ab8a-96745a6c3f2d/manager/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.358801 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-xxhjq_401ab06e-1973-4cbc-9cb6-f1d3a1af5ba8/operator/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.484266 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-vrf7m_e8efd43e-cc1b-4812-acff-c618a78692a6/kube-rbac-proxy/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.551383 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-vrf7m_e8efd43e-cc1b-4812-acff-c618a78692a6/manager/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.711179 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nvz55_f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb/kube-rbac-proxy/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.872777 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-gfsx6_e592430a-6d1d-4e0e-8220-d51f46bbd54e/kube-rbac-proxy/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.899279 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nvz55_f7d0b1cb-4c87-4f60-89fb-a2ee19b62ceb/manager/0.log" Sep 30 21:20:30 crc kubenswrapper[4756]: I0930 21:20:30.915769 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-gfsx6_e592430a-6d1d-4e0e-8220-d51f46bbd54e/manager/0.log" Sep 30 21:20:31 crc kubenswrapper[4756]: I0930 21:20:31.101653 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-p4prj_8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c/kube-rbac-proxy/0.log" Sep 30 21:20:31 crc kubenswrapper[4756]: I0930 21:20:31.151741 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-p4prj_8ca7950c-897c-4a5b-bf6e-2a9aa33bfa2c/manager/0.log" Sep 30 21:20:31 crc kubenswrapper[4756]: I0930 21:20:31.304634 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-98d66ccb9-2xh6m_fbc8173b-fdca-44d0-af4e-5f2f106bd031/manager/0.log" Sep 30 21:20:43 crc kubenswrapper[4756]: I0930 21:20:43.115984 4756 
scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:20:43 crc kubenswrapper[4756]: E0930 21:20:43.118836 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.087559 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:20:47 crc kubenswrapper[4756]: E0930 21:20:47.088523 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b484e073-737b-4843-9656-d3361e2cef9e" containerName="container-00" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.088536 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b484e073-737b-4843-9656-d3361e2cef9e" containerName="container-00" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.088731 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b484e073-737b-4843-9656-d3361e2cef9e" containerName="container-00" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.090309 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.098699 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.118404 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlk9z\" (UniqueName: \"kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.118529 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.118585 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.131876 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-trj9z_baa40fd8-6584-4742-992d-ccc029178c3e/control-plane-machine-set-operator/0.log" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.220496 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlk9z\" (UniqueName: \"kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z\") pod 
\"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.220550 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.220576 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.221002 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.222416 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.242198 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlk9z\" (UniqueName: \"kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z\") pod \"certified-operators-czh5t\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.255125 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-ln592_1f640044-1129-4119-a819-bc37a9aaf617/kube-rbac-proxy/0.log" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.359155 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-ln592_1f640044-1129-4119-a819-bc37a9aaf617/machine-api-operator/0.log" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.419714 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:47 crc kubenswrapper[4756]: I0930 21:20:47.977937 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:20:48 crc kubenswrapper[4756]: I0930 21:20:48.895430 4756 generic.go:334] "Generic (PLEG): container finished" podID="947eae9f-30e0-4560-9f8c-72362238c397" containerID="b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16" exitCode=0 Sep 30 21:20:48 crc kubenswrapper[4756]: I0930 21:20:48.895512 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerDied","Data":"b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16"} Sep 30 21:20:48 crc kubenswrapper[4756]: I0930 21:20:48.896725 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerStarted","Data":"db064f6a6e0ae1dcfef4c0ae7fe517aa726a4affeb819f4df703316457e87cb0"} Sep 30 21:20:49 crc kubenswrapper[4756]: I0930 21:20:49.909234 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerStarted","Data":"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890"} Sep 30 21:20:51 crc kubenswrapper[4756]: I0930 21:20:51.933579 4756 generic.go:334] "Generic (PLEG): container finished" podID="947eae9f-30e0-4560-9f8c-72362238c397" containerID="e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890" exitCode=0 Sep 30 21:20:51 crc kubenswrapper[4756]: I0930 21:20:51.933637 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerDied","Data":"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890"} Sep 30 21:20:52 crc kubenswrapper[4756]: I0930 21:20:52.946216 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerStarted","Data":"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab"} Sep 30 21:20:52 crc kubenswrapper[4756]: I0930 21:20:52.970119 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-czh5t" podStartSLOduration=2.360199821 podStartE2EDuration="5.970096328s" podCreationTimestamp="2025-09-30 21:20:47 +0000 UTC" firstStartedPulling="2025-09-30 21:20:48.89756526 +0000 UTC m=+6578.518498747" lastFinishedPulling="2025-09-30 21:20:52.507461737 +0000 UTC m=+6582.128395254" observedRunningTime="2025-09-30 21:20:52.959131773 +0000 UTC m=+6582.580065270" watchObservedRunningTime="2025-09-30 21:20:52.970096328 +0000 UTC m=+6582.591029815" Sep 30 21:20:57 crc kubenswrapper[4756]: I0930 21:20:57.420797 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:57 crc kubenswrapper[4756]: I0930 21:20:57.421216 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:57 crc kubenswrapper[4756]: I0930 21:20:57.516080 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:58 crc kubenswrapper[4756]: I0930 21:20:58.068757 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:20:58 crc kubenswrapper[4756]: I0930 21:20:58.115696 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:20:58 crc kubenswrapper[4756]: E0930 21:20:58.116105 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:20:58 crc kubenswrapper[4756]: I0930 21:20:58.147244 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:20:59 crc kubenswrapper[4756]: I0930 21:20:59.488033 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-c2pg9_093e0d0f-7fdc-49ca-ba01-928bf465b0a8/cert-manager-controller/0.log" Sep 30 21:20:59 crc kubenswrapper[4756]: I0930 21:20:59.666776 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-xnqxh_6d4a1d50-a63f-4a46-a545-4af49e08fc6a/cert-manager-cainjector/0.log" Sep 30 21:20:59 crc kubenswrapper[4756]: I0930 21:20:59.672143 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-gk77c_01b70269-7116-446c-b3d7-f0ae3ff0a1c4/cert-manager-webhook/0.log" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.025635 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-czh5t" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="registry-server" containerID="cri-o://9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab" gracePeriod=2 Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.534137 4756 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.620797 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content\") pod \"947eae9f-30e0-4560-9f8c-72362238c397\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.620875 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities\") pod \"947eae9f-30e0-4560-9f8c-72362238c397\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.621065 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlk9z\" (UniqueName: \"kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z\") pod \"947eae9f-30e0-4560-9f8c-72362238c397\" (UID: \"947eae9f-30e0-4560-9f8c-72362238c397\") " Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.621913 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities" (OuterVolumeSpecName: "utilities") pod "947eae9f-30e0-4560-9f8c-72362238c397" (UID: "947eae9f-30e0-4560-9f8c-72362238c397"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.628181 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z" (OuterVolumeSpecName: "kube-api-access-rlk9z") pod "947eae9f-30e0-4560-9f8c-72362238c397" (UID: "947eae9f-30e0-4560-9f8c-72362238c397"). InnerVolumeSpecName "kube-api-access-rlk9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.678509 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "947eae9f-30e0-4560-9f8c-72362238c397" (UID: "947eae9f-30e0-4560-9f8c-72362238c397"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.723503 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlk9z\" (UniqueName: \"kubernetes.io/projected/947eae9f-30e0-4560-9f8c-72362238c397-kube-api-access-rlk9z\") on node \"crc\" DevicePath \"\"" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.723545 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:21:00 crc kubenswrapper[4756]: I0930 21:21:00.723558 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/947eae9f-30e0-4560-9f8c-72362238c397-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.039469 4756 generic.go:334] "Generic (PLEG): container finished" podID="947eae9f-30e0-4560-9f8c-72362238c397" containerID="9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab" exitCode=0 Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.039533 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-czh5t" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.039582 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerDied","Data":"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab"} Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.039719 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czh5t" event={"ID":"947eae9f-30e0-4560-9f8c-72362238c397","Type":"ContainerDied","Data":"db064f6a6e0ae1dcfef4c0ae7fe517aa726a4affeb819f4df703316457e87cb0"} Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.039745 4756 scope.go:117] "RemoveContainer" containerID="9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.082470 4756 scope.go:117] "RemoveContainer" containerID="e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.084364 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.105529 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-czh5t"] Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.107327 4756 scope.go:117] "RemoveContainer" containerID="b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.135199 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="947eae9f-30e0-4560-9f8c-72362238c397" path="/var/lib/kubelet/pods/947eae9f-30e0-4560-9f8c-72362238c397/volumes" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.160710 4756 scope.go:117] "RemoveContainer" containerID="9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab" Sep 30 21:21:01 crc kubenswrapper[4756]: E0930 21:21:01.161202 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab\": container with ID 
starting with 9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab not found: ID does not exist" containerID="9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.161239 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab"} err="failed to get container status \"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab\": rpc error: code = NotFound desc = could not find container \"9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab\": container with ID starting with 9efe9107596c13cdeadc35857d444d68d3ef94f2a94d6fa79b9782e7d79108ab not found: ID does not exist" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.161264 4756 scope.go:117] "RemoveContainer" containerID="e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890" Sep 30 21:21:01 crc kubenswrapper[4756]: E0930 21:21:01.161701 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890\": container with ID starting with e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890 not found: ID does not exist" containerID="e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.161744 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890"} err="failed to get container status \"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890\": rpc error: code = NotFound desc = could not find container \"e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890\": container with ID starting with e2e362458888b1a4f41822549ba043362b31a8b571a68394c56042656b1dd890 not found: ID does not exist" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.161768 4756 scope.go:117] "RemoveContainer" containerID="b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16" Sep 30 21:21:01 crc kubenswrapper[4756]: E0930 21:21:01.162090 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16\": container with ID starting with b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16 not found: ID does not exist" containerID="b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16" Sep 30 21:21:01 crc kubenswrapper[4756]: I0930 21:21:01.162114 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16"} err="failed to get container status \"b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16\": rpc error: code = NotFound desc = could not find container \"b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16\": container with ID starting with b9278c5a10c4c66510d852d32155516db3c3592339cdda0a4033f3829eefaf16 not found: ID does not exist" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 21:21:11.316940 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-bs656_7962f4e9-16e9-46dc-b9f7-e5a31b1e79eb/nmstate-console-plugin/0.log" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 
21:21:11.519173 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jgcq4_c017e6b9-1931-4cac-9df1-4f8f03bc9c8c/nmstate-handler/0.log" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 21:21:11.533697 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-pcnln_745caeb3-41a0-4698-9e0f-66e9b4f0f318/kube-rbac-proxy/0.log" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 21:21:11.582968 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-pcnln_745caeb3-41a0-4698-9e0f-66e9b4f0f318/nmstate-metrics/0.log" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 21:21:11.694529 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-w8zn2_2def6bf4-9b5a-4c15-9fdd-86918ae44098/nmstate-operator/0.log" Sep 30 21:21:11 crc kubenswrapper[4756]: I0930 21:21:11.766876 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-9swr2_0f0f1f9b-37a6-4af8-9a39-cdfd7656f1b3/nmstate-webhook/0.log" Sep 30 21:21:13 crc kubenswrapper[4756]: I0930 21:21:13.116503 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d" Sep 30 21:21:13 crc kubenswrapper[4756]: E0930 21:21:13.117244 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.283159 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-c9zc5_44d5093f-1239-42d2-9ff5-393109958a08/kube-rbac-proxy/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.599523 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-frr-files/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.600349 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-c9zc5_44d5093f-1239-42d2-9ff5-393109958a08/controller/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.796390 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-reloader/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.813964 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-reloader/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.830946 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-metrics/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.850273 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zm4qk"] Sep 30 21:21:25 crc kubenswrapper[4756]: E0930 21:21:25.850728 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="extract-utilities" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.850739 4756 
state_mem.go:107] "Deleted CPUSet assignment" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="extract-utilities" Sep 30 21:21:25 crc kubenswrapper[4756]: E0930 21:21:25.850765 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="registry-server" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.850771 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="registry-server" Sep 30 21:21:25 crc kubenswrapper[4756]: E0930 21:21:25.850788 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="extract-content" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.850794 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="extract-content" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.850981 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="947eae9f-30e0-4560-9f8c-72362238c397" containerName="registry-server" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.852441 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.861461 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zm4qk"] Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.956538 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-frr-files/0.log" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.987876 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4fkn\" (UniqueName: \"kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.987918 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:25 crc kubenswrapper[4756]: I0930 21:21:25.987971 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.093629 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4fkn\" (UniqueName: \"kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.093677 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.093734 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.094843 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.095076 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.121163 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4fkn\" (UniqueName: \"kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn\") pod \"community-operators-zm4qk\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") " pod="openshift-marketplace/community-operators-zm4qk" Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.169750 4756 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.643139 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-metrics/0.log"
Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.733277 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zm4qk"]
Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.748031 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-reloader/0.log"
Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.755868 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-frr-files/0.log"
Sep 30 21:21:26 crc kubenswrapper[4756]: I0930 21:21:26.829991 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-metrics/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.037466 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-frr-files/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.071444 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-reloader/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.075761 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/cp-metrics/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.102847 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/controller/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.316737 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/kube-rbac-proxy/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.342473 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/frr-metrics/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.369094 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerID="0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa" exitCode=0
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.369135 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerDied","Data":"0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa"}
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.369164 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerStarted","Data":"d753597bc805ab2435bdc220bcfaa5629387afa0a8e620e07c846fa7ed468b2c"}
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.371733 4756 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.385932 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/kube-rbac-proxy-frr/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.561932 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/reloader/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.664077 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-f2dkx_6c83c6d0-f45f-4c8d-89d6-4434da2f24c9/frr-k8s-webhook-server/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.845628 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6787fd9965-mp4jt_7a236abf-4700-48d8-a59e-2ad5ff34d034/manager/0.log"
Sep 30 21:21:27 crc kubenswrapper[4756]: I0930 21:21:27.967971 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6b8b4f7c6f-s2bh6_87993ac9-83b4-4481-8611-f3f458f4b424/webhook-server/0.log"
Sep 30 21:21:28 crc kubenswrapper[4756]: I0930 21:21:28.115996 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:21:28 crc kubenswrapper[4756]: E0930 21:21:28.116282 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:21:28 crc kubenswrapper[4756]: I0930 21:21:28.194352 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kqcpz_0a9cec75-23b0-43d9-8914-4eb7fa93c7fa/kube-rbac-proxy/0.log"
Sep 30 21:21:29 crc kubenswrapper[4756]: I0930 21:21:29.130339 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kqcpz_0a9cec75-23b0-43d9-8914-4eb7fa93c7fa/speaker/0.log"
Sep 30 21:21:29 crc kubenswrapper[4756]: I0930 21:21:29.406044 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerStarted","Data":"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"}
Sep 30 21:21:29 crc kubenswrapper[4756]: I0930 21:21:29.846887 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-prvjv_beeaebc2-50a1-44eb-b336-d731d9658490/frr/0.log"
Sep 30 21:21:30 crc kubenswrapper[4756]: I0930 21:21:30.415845 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerID="f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a" exitCode=0
Sep 30 21:21:30 crc kubenswrapper[4756]: I0930 21:21:30.415885 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerDied","Data":"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"}
Sep 30 21:21:31 crc kubenswrapper[4756]: I0930 21:21:31.427970 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerStarted","Data":"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"}
Sep 30 21:21:31 crc kubenswrapper[4756]: I0930 21:21:31.450575 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zm4qk" podStartSLOduration=2.946473962 podStartE2EDuration="6.4505527s" podCreationTimestamp="2025-09-30 21:21:25 +0000 UTC" firstStartedPulling="2025-09-30 21:21:27.371468581 +0000 UTC m=+6616.992402058" lastFinishedPulling="2025-09-30 21:21:30.875547319 +0000 UTC m=+6620.496480796" observedRunningTime="2025-09-30 21:21:31.443807454 +0000 UTC m=+6621.064740941" watchObservedRunningTime="2025-09-30 21:21:31.4505527 +0000 UTC m=+6621.071486197"
Sep 30 21:21:36 crc kubenswrapper[4756]: I0930 21:21:36.170214 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:36 crc kubenswrapper[4756]: I0930 21:21:36.170965 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:36 crc kubenswrapper[4756]: I0930 21:21:36.222779 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:36 crc kubenswrapper[4756]: I0930 21:21:36.600198 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:36 crc kubenswrapper[4756]: I0930 21:21:36.648783 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zm4qk"]
Sep 30 21:21:38 crc kubenswrapper[4756]: I0930 21:21:38.515293 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zm4qk" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="registry-server" containerID="cri-o://c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d" gracePeriod=2
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.126645 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:21:39 crc kubenswrapper[4756]: E0930 21:21:39.127518 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.176702 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.271207 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content\") pod \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") "
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.271326 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities\") pod \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") "
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.271538 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4fkn\" (UniqueName: \"kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn\") pod \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\" (UID: \"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c\") "
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.273443 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities" (OuterVolumeSpecName: "utilities") pod "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" (UID: "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.307625 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn" (OuterVolumeSpecName: "kube-api-access-h4fkn") pod "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" (UID: "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c"). InnerVolumeSpecName "kube-api-access-h4fkn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.374851 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4fkn\" (UniqueName: \"kubernetes.io/projected/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-kube-api-access-h4fkn\") on node \"crc\" DevicePath \"\""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.375556 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.411507 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" (UID: "b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.478079 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.526864 4756 generic.go:334] "Generic (PLEG): container finished" podID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerID="c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d" exitCode=0
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.526917 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerDied","Data":"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"}
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.526950 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zm4qk" event={"ID":"b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c","Type":"ContainerDied","Data":"d753597bc805ab2435bdc220bcfaa5629387afa0a8e620e07c846fa7ed468b2c"}
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.526971 4756 scope.go:117] "RemoveContainer" containerID="c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.527147 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zm4qk"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.564089 4756 scope.go:117] "RemoveContainer" containerID="f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.572614 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zm4qk"]
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.584765 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zm4qk"]
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.601837 4756 scope.go:117] "RemoveContainer" containerID="0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.636184 4756 scope.go:117] "RemoveContainer" containerID="c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"
Sep 30 21:21:39 crc kubenswrapper[4756]: E0930 21:21:39.636663 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d\": container with ID starting with c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d not found: ID does not exist" containerID="c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.636802 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d"} err="failed to get container status \"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d\": rpc error: code = NotFound desc = could not find container \"c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d\": container with ID starting with c7063f644695c37b379e9900d01d2a99919c0a21772acb883a7ade7227c4c54d not found: ID does not exist"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.636928 4756 scope.go:117] "RemoveContainer" containerID="f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"
Sep 30 21:21:39 crc kubenswrapper[4756]: E0930 21:21:39.637314 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a\": container with ID starting with f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a not found: ID does not exist" containerID="f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.637451 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a"} err="failed to get container status \"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a\": rpc error: code = NotFound desc = could not find container \"f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a\": container with ID starting with f115f36ed6533a9aa5b43493b73141473a57c4b05f7d5810615c7bd7eab99a5a not found: ID does not exist"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.637546 4756 scope.go:117] "RemoveContainer" containerID="0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa"
Sep 30 21:21:39 crc kubenswrapper[4756]: E0930 21:21:39.637870 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa\": container with ID starting with 0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa not found: ID does not exist" containerID="0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa"
Sep 30 21:21:39 crc kubenswrapper[4756]: I0930 21:21:39.637983 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa"} err="failed to get container status \"0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa\": rpc error: code = NotFound desc = could not find container \"0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa\": container with ID starting with 0dca18199d6648c8f3a89fc7360e64c8e751aa5a63ca0bc445e1051044a31dfa not found: ID does not exist"
Sep 30 21:21:41 crc kubenswrapper[4756]: I0930 21:21:41.132214 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" path="/var/lib/kubelet/pods/b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c/volumes"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.015138 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.164385 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.189567 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.255124 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.366439 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.393472 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/extract/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.412765 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69ckkjc_e562b41d-d5b4-4053-86a7-90bfc0442280/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.543729 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.739201 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.740240 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.765125 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.944994 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/util/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.950408 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/pull/0.log"
Sep 30 21:21:42 crc kubenswrapper[4756]: I0930 21:21:42.976868 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvmq86_c8aaa37a-07d0-4d40-a4b4-5204a6357700/extract/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.096738 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/util/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.267750 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/util/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.283333 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/pull/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.302238 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/pull/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.488463 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/pull/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.512188 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/extract/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.534485 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d8r5nq_7f8e9cd9-d70a-43d4-829d-bcf006fe3916/util/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.678145 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-utilities/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.865018 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-content/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.869824 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-content/0.log"
Sep 30 21:21:43 crc kubenswrapper[4756]: I0930 21:21:43.870026 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-utilities/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.044100 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-content/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.052939 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/extract-utilities/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.387718 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-utilities/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.579096 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-content/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.689799 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-utilities/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.713767 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-content/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.927045 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zmzd7_a2916419-e636-4f43-9b71-e80307c8f0ef/registry-server/0.log"
Sep 30 21:21:44 crc kubenswrapper[4756]: I0930 21:21:44.992625 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-utilities/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.075457 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/extract-content/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.222711 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/util/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.466966 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/util/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.481308 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/pull/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.579320 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/pull/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.752624 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/pull/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.767883 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/util/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.833004 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d964x6lm_63c2f5ab-2343-4339-9fcf-f0fd80b703af/extract/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.880502 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hjcpm_094a83f2-0234-4c62-ba77-e622e631e9fc/registry-server/0.log"
Sep 30 21:21:45 crc kubenswrapper[4756]: I0930 21:21:45.948436 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-p27l9_72e24494-4294-4d3e-934e-4a31a0e7601f/marketplace-operator/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.026379 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-utilities/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.198148 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.216268 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-utilities/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.228521 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.380919 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-utilities/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.403470 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.451439 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-utilities/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.608968 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzt5t_59f8fa32-72a8-4202-839f-545d6b81ea1a/registry-server/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.659843 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-utilities/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.664746 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.721386 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.831559 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-content/0.log"
Sep 30 21:21:46 crc kubenswrapper[4756]: I0930 21:21:46.870960 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/extract-utilities/0.log"
Sep 30 21:21:47 crc kubenswrapper[4756]: I0930 21:21:47.612591 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5jlx7_37aac716-15ee-430e-a9ab-bf0bcd10a1c0/registry-server/0.log"
Sep 30 21:21:52 crc kubenswrapper[4756]: I0930 21:21:52.116212 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:21:52 crc kubenswrapper[4756]: E0930 21:21:52.117213 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:21:54 crc kubenswrapper[4756]: I0930 21:21:54.047731 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-hdf69"]
Sep 30 21:21:54 crc kubenswrapper[4756]: I0930 21:21:54.058987 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-hdf69"]
Sep 30 21:21:55 crc kubenswrapper[4756]: I0930 21:21:55.139176 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5acee4c-8040-4b34-ae2c-e81a40d3c0da" path="/var/lib/kubelet/pods/d5acee4c-8040-4b34-ae2c-e81a40d3c0da/volumes"
Sep 30 21:21:58 crc kubenswrapper[4756]: I0930 21:21:58.549865 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-pkmns_6710b308-b5a5-4043-97c2-68b7816fac9a/prometheus-operator/0.log"
Sep 30 21:21:58 crc kubenswrapper[4756]: I0930 21:21:58.696148 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-76d859999c-rr264_72fdf198-38d1-43f1-896e-4ed41cc91785/prometheus-operator-admission-webhook/0.log"
Sep 30 21:21:58 crc kubenswrapper[4756]: I0930 21:21:58.726494 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-76d859999c-v2j65_fc61e8ad-97b2-4569-afbc-b78a6536b09f/prometheus-operator-admission-webhook/0.log"
Sep 30 21:21:58 crc kubenswrapper[4756]: I0930 21:21:58.871071 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-87pm5_32f5b188-86d1-4426-a043-04707dc47288/operator/0.log"
Sep 30 21:21:58 crc kubenswrapper[4756]: I0930 21:21:58.909491 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-vhmnz_ddaf74a9-b3d6-426a-8e7f-f2bd8d51fc84/perses-operator/0.log"
Sep 30 21:22:03 crc kubenswrapper[4756]: I0930 21:22:03.116080 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:22:03 crc kubenswrapper[4756]: E0930 21:22:03.116780 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:22:04 crc kubenswrapper[4756]: I0930 21:22:04.071507 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-1e35-account-create-fr9l6"]
Sep 30 21:22:04 crc kubenswrapper[4756]: I0930 21:22:04.091256 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-1e35-account-create-fr9l6"]
Sep 30 21:22:05 crc kubenswrapper[4756]: I0930 21:22:05.131362 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb9200a1-8617-478b-a82c-c91af7397ea0" path="/var/lib/kubelet/pods/bb9200a1-8617-478b-a82c-c91af7397ea0/volumes"
Sep 30 21:22:15 crc kubenswrapper[4756]: I0930 21:22:15.116456 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:22:15 crc kubenswrapper[4756]: E0930 21:22:15.117184 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:22:21 crc kubenswrapper[4756]: I0930 21:22:21.040287 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-t9cp4"]
Sep 30 21:22:21 crc kubenswrapper[4756]: I0930 21:22:21.053308 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-t9cp4"]
Sep 30 21:22:21 crc kubenswrapper[4756]: I0930 21:22:21.129418 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d" path="/var/lib/kubelet/pods/fd547ce9-ce4c-4a7d-9f4b-3c06aeb45d1d/volumes"
Sep 30 21:22:21 crc kubenswrapper[4756]: E0930 21:22:21.566759 4756 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.12:47312->38.102.83.12:35835: read tcp 38.102.83.12:47312->38.102.83.12:35835: read: connection reset by peer
Sep 30 21:22:28 crc kubenswrapper[4756]: I0930 21:22:28.471659 4756 scope.go:117] "RemoveContainer" containerID="97abc75d3577dc1b769c05ea6591436fbc9c0bff6e16c7ddf58f0097efa9812f"
Sep 30 21:22:28 crc kubenswrapper[4756]: I0930 21:22:28.516206 4756 scope.go:117] "RemoveContainer" containerID="f816c20cae3fb7173c5c2afd3b569ba21ba73211130fb7f47fbbb1d28c8167d2"
Sep 30 21:22:28 crc kubenswrapper[4756]: I0930 21:22:28.568887 4756 scope.go:117] "RemoveContainer" containerID="eb4d3d4bc885a9df1108c5e3a04ccf0e5372ba7e031cc4508c38ea0de4b8ae7a"
Sep 30 21:22:29 crc kubenswrapper[4756]: I0930 21:22:29.116448 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:22:29 crc kubenswrapper[4756]: E0930 21:22:29.116725 4756 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4n9zj_openshift-machine-config-operator(3370c2ca-fec3-4f90-8df7-51e21e6c7e1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c"
Sep 30 21:22:42 crc kubenswrapper[4756]: I0930 21:22:42.116913 4756 scope.go:117] "RemoveContainer" containerID="6f222caf34daae427ad1ef1c9c0a49f393ad85f849cbc1acf4a5ac67e633c58d"
Sep 30 21:22:43 crc kubenswrapper[4756]: I0930 21:22:43.250689 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" event={"ID":"3370c2ca-fec3-4f90-8df7-51e21e6c7e1c","Type":"ContainerStarted","Data":"7a1e61507965b5ed8612643c32c1d7bed9ec38df6904eb5ed49a0be1b0fd68af"}
Sep 30 21:23:52 crc kubenswrapper[4756]: I0930 21:23:52.224800 4756 generic.go:334] "Generic (PLEG): container finished" podID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerID="a6dc072c33d143d432f4e144d4be2326254d749675486a9cef18af9489f312a8" exitCode=0
Sep 30 21:23:52 crc kubenswrapper[4756]: I0930 21:23:52.225491 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" event={"ID":"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f","Type":"ContainerDied","Data":"a6dc072c33d143d432f4e144d4be2326254d749675486a9cef18af9489f312a8"}
Sep 30 21:23:52 crc kubenswrapper[4756]: I0930 21:23:52.227144 4756 scope.go:117] "RemoveContainer" containerID="a6dc072c33d143d432f4e144d4be2326254d749675486a9cef18af9489f312a8"
Sep 30 21:23:53 crc kubenswrapper[4756]: I0930 21:23:53.181956 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6qhq5_must-gather-nqgxs_ddc4c1da-0ea7-49c9-abd5-a2177846ed0f/gather/0.log"
Sep 30 21:24:00 crc kubenswrapper[4756]: I0930 21:24:00.778584 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6qhq5/must-gather-nqgxs"]
Sep 30 21:24:00 crc kubenswrapper[4756]: I0930 21:24:00.779593 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-6qhq5/must-gather-nqgxs" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="copy" containerID="cri-o://4ff3a708c859ef44cd2a352bad215867fd485dbc5a5d03082cf85a6c7ab0783f" gracePeriod=2
Sep 30 21:24:00 crc kubenswrapper[4756]: I0930 21:24:00.795342 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6qhq5/must-gather-nqgxs"]
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.356354 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6qhq5_must-gather-nqgxs_ddc4c1da-0ea7-49c9-abd5-a2177846ed0f/copy/0.log"
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.356586 4756 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6qhq5_must-gather-nqgxs_ddc4c1da-0ea7-49c9-abd5-a2177846ed0f/copy/0.log"
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.357259 4756 generic.go:334] "Generic (PLEG): container finished" podID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerID="4ff3a708c859ef44cd2a352bad215867fd485dbc5a5d03082cf85a6c7ab0783f" exitCode=143
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.357304 4756 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72e987ac4694a681c67413bd2db50377551c5087d50ffea8d85bef5ed7aadf32"
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.357468 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/must-gather-nqgxs"
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.435671 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvzk5\" (UniqueName: \"kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5\") pod \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") "
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.435919 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output\") pod \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\" (UID: \"ddc4c1da-0ea7-49c9-abd5-a2177846ed0f\") "
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.444633 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5" (OuterVolumeSpecName: "kube-api-access-lvzk5") pod "ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" (UID: "ddc4c1da-0ea7-49c9-abd5-a2177846ed0f"). InnerVolumeSpecName "kube-api-access-lvzk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.539018 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvzk5\" (UniqueName: \"kubernetes.io/projected/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-kube-api-access-lvzk5\") on node \"crc\" DevicePath \"\""
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.634327 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" (UID: "ddc4c1da-0ea7-49c9-abd5-a2177846ed0f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:24:01 crc kubenswrapper[4756]: I0930 21:24:01.641075 4756 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f-must-gather-output\") on node \"crc\" DevicePath \"\""
Sep 30 21:24:02 crc kubenswrapper[4756]: I0930 21:24:02.363837 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6qhq5/must-gather-nqgxs"
Sep 30 21:24:03 crc kubenswrapper[4756]: I0930 21:24:03.127180 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" path="/var/lib/kubelet/pods/ddc4c1da-0ea7-49c9-abd5-a2177846ed0f/volumes"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.716956 4756 scope.go:117] "RemoveContainer" containerID="37db107c3d1fe50b42bf81a3c511527b38ab952825a82f79eed4282e4c913293"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.745544 4756 scope.go:117] "RemoveContainer" containerID="45052e2f1233910663b412affe9b32c0962b6b6edcec6bd92ce13e7063cc6331"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.805333 4756 scope.go:117] "RemoveContainer" containerID="5e91a63c0b0600398f86a99f5dbbe5d84d2e6a01cf872aa18e67dab03e3309a2"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.833104 4756 scope.go:117] "RemoveContainer" containerID="9ed758d16b1255d41153ae3c27a7cda937b22baf1c01adf037d8db17683e714e"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.880665 4756 scope.go:117] "RemoveContainer" containerID="4ff3a708c859ef44cd2a352bad215867fd485dbc5a5d03082cf85a6c7ab0783f"
Sep 30 21:24:28 crc kubenswrapper[4756]: I0930 21:24:28.926665 4756 scope.go:117] "RemoveContainer" containerID="a6dc072c33d143d432f4e144d4be2326254d749675486a9cef18af9489f312a8"
Sep 30 21:24:37 crc kubenswrapper[4756]: I0930 21:24:37.066559 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-kkqjz"]
Sep 30 21:24:37 crc kubenswrapper[4756]: I0930 21:24:37.077579 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-kkqjz"]
Sep 30 21:24:37 crc kubenswrapper[4756]: I0930 21:24:37.129989 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ced7e732-18e2-428b-9fdb-15d1d252ad85" path="/var/lib/kubelet/pods/ced7e732-18e2-428b-9fdb-15d1d252ad85/volumes"
Sep 30 21:24:47 crc kubenswrapper[4756]: I0930 21:24:47.048767 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-f034-account-create-n2g2c"]
Sep 30 21:24:47 crc kubenswrapper[4756]: I0930 21:24:47.062315 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-f034-account-create-n2g2c"]
Sep 30 21:24:47 crc kubenswrapper[4756]: I0930 21:24:47.129009 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dac2869-2e8d-403d-bfe4-65d5d74ee236" path="/var/lib/kubelet/pods/1dac2869-2e8d-403d-bfe4-65d5d74ee236/volumes"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.891081 4756 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:24:51 crc kubenswrapper[4756]: E0930 21:24:51.892353 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="extract-content"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.892378 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="extract-content"
Sep 30 21:24:51 crc kubenswrapper[4756]: E0930 21:24:51.892432 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="copy"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.892445 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="copy"
Sep 30 21:24:51 crc kubenswrapper[4756]: E0930 21:24:51.892489 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="gather"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.892503 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="gather"
Sep 30 21:24:51 crc kubenswrapper[4756]: E0930 21:24:51.892636 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="extract-utilities"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.892650 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="extract-utilities"
Sep 30 21:24:51 crc kubenswrapper[4756]: E0930 21:24:51.892714 4756 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="registry-server"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.892727 4756 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="registry-server"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.893071 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="gather"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.893116 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2b393f9-e7ad-4bd7-a4e0-d792e9d6c51c" containerName="registry-server"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.893152 4756 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddc4c1da-0ea7-49c9-abd5-a2177846ed0f" containerName="copy"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.900858 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.942491 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.997098 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.997285 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xsll\" (UniqueName: \"kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:51 crc kubenswrapper[4756]: I0930 21:24:51.997332 4756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.098949 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xsll\" (UniqueName: \"kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.099023 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.099123 4756 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.099694 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.099971 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.123604 4756 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xsll\" (UniqueName: \"kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll\") pod \"redhat-operators-5zncm\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") " pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.247644 4756 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.729000 4756 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.977765 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerStarted","Data":"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"}
Sep 30 21:24:52 crc kubenswrapper[4756]: I0930 21:24:52.977804 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerStarted","Data":"b3b94e050bd95ba7e08f79b78d5f6e3d77a45f9d7ad72a0b2e8ffee7b44ec8c7"}
Sep 30 21:24:54 crc kubenswrapper[4756]: I0930 21:24:54.011206 4756 generic.go:334] "Generic (PLEG): container finished" podID="68322c57-caa9-4db4-a2c2-bf4780587733" containerID="5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b" exitCode=0
Sep 30 21:24:54 crc kubenswrapper[4756]: I0930 21:24:54.011548 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerDied","Data":"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"}
Sep 30 21:24:56 crc kubenswrapper[4756]: I0930 21:24:56.044431 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerStarted","Data":"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"}
Sep 30 21:24:58 crc kubenswrapper[4756]: I0930 21:24:58.042418 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-7vh8h"]
Sep 30 21:24:58 crc kubenswrapper[4756]: I0930 21:24:58.058658 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-7vh8h"]
Sep 30 21:24:59 crc kubenswrapper[4756]: I0930 21:24:59.090517 4756 generic.go:334] "Generic (PLEG): container finished" podID="68322c57-caa9-4db4-a2c2-bf4780587733" containerID="7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e" exitCode=0
Sep 30 21:24:59 crc kubenswrapper[4756]: I0930 21:24:59.090587 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerDied","Data":"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"}
Sep 30 21:24:59 crc kubenswrapper[4756]: I0930 21:24:59.152259 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16598b9e-0208-4a53-87f6-1069af4b6c45" path="/var/lib/kubelet/pods/16598b9e-0208-4a53-87f6-1069af4b6c45/volumes"
Sep 30 21:25:00 crc kubenswrapper[4756]: I0930 21:25:00.103983 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerStarted","Data":"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"}
Sep 30 21:25:00 crc kubenswrapper[4756]: I0930 21:25:00.130380 4756 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5zncm" podStartSLOduration=3.486627289 podStartE2EDuration="9.13035659s" podCreationTimestamp="2025-09-30 21:24:51 +0000 UTC" firstStartedPulling="2025-09-30 21:24:54.014819158 +0000 UTC m=+6823.635752675" lastFinishedPulling="2025-09-30 21:24:59.658548469 +0000 UTC m=+6829.279481976" observedRunningTime="2025-09-30 21:25:00.125325719 +0000 UTC m=+6829.746259206" watchObservedRunningTime="2025-09-30 21:25:00.13035659 +0000 UTC m=+6829.751290097"
Sep 30 21:25:02 crc kubenswrapper[4756]: I0930 21:25:02.247829 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:02 crc kubenswrapper[4756]: I0930 21:25:02.248175 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:03 crc kubenswrapper[4756]: I0930 21:25:03.334366 4756 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5zncm" podUID="68322c57-caa9-4db4-a2c2-bf4780587733" containerName="registry-server" probeResult="failure" output=<
Sep 30 21:25:03 crc kubenswrapper[4756]: timeout: failed to connect service ":50051" within 1s
Sep 30 21:25:03 crc kubenswrapper[4756]: >
Sep 30 21:25:11 crc kubenswrapper[4756]: I0930 21:25:11.539514 4756 patch_prober.go:28] interesting pod/machine-config-daemon-4n9zj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 21:25:11 crc kubenswrapper[4756]: I0930 21:25:11.540240 4756 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4n9zj" podUID="3370c2ca-fec3-4f90-8df7-51e21e6c7e1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 21:25:12 crc kubenswrapper[4756]: I0930 21:25:12.313204 4756 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:12 crc kubenswrapper[4756]: I0930 21:25:12.361802 4756 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:12 crc kubenswrapper[4756]: I0930 21:25:12.556053 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.295056 4756 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5zncm" podUID="68322c57-caa9-4db4-a2c2-bf4780587733" containerName="registry-server" containerID="cri-o://f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4" gracePeriod=2
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.810719 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.886248 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content\") pod \"68322c57-caa9-4db4-a2c2-bf4780587733\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") "
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.886440 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xsll\" (UniqueName: \"kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll\") pod \"68322c57-caa9-4db4-a2c2-bf4780587733\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") "
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.886535 4756 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities\") pod \"68322c57-caa9-4db4-a2c2-bf4780587733\" (UID: \"68322c57-caa9-4db4-a2c2-bf4780587733\") "
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.887478 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities" (OuterVolumeSpecName: "utilities") pod "68322c57-caa9-4db4-a2c2-bf4780587733" (UID: "68322c57-caa9-4db4-a2c2-bf4780587733"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.897641 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll" (OuterVolumeSpecName: "kube-api-access-5xsll") pod "68322c57-caa9-4db4-a2c2-bf4780587733" (UID: "68322c57-caa9-4db4-a2c2-bf4780587733"). InnerVolumeSpecName "kube-api-access-5xsll". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.989088 4756 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xsll\" (UniqueName: \"kubernetes.io/projected/68322c57-caa9-4db4-a2c2-bf4780587733-kube-api-access-5xsll\") on node \"crc\" DevicePath \"\""
Sep 30 21:25:14 crc kubenswrapper[4756]: I0930 21:25:14.989124 4756 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.006132 4756 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68322c57-caa9-4db4-a2c2-bf4780587733" (UID: "68322c57-caa9-4db4-a2c2-bf4780587733"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.090687 4756 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68322c57-caa9-4db4-a2c2-bf4780587733-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.314750 4756 generic.go:334] "Generic (PLEG): container finished" podID="68322c57-caa9-4db4-a2c2-bf4780587733" containerID="f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4" exitCode=0
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.314833 4756 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zncm"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.314819 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerDied","Data":"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"}
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.314977 4756 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zncm" event={"ID":"68322c57-caa9-4db4-a2c2-bf4780587733","Type":"ContainerDied","Data":"b3b94e050bd95ba7e08f79b78d5f6e3d77a45f9d7ad72a0b2e8ffee7b44ec8c7"}
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.315012 4756 scope.go:117] "RemoveContainer" containerID="f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.365132 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.365895 4756 scope.go:117] "RemoveContainer" containerID="7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.379039 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5zncm"]
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.407290 4756 scope.go:117] "RemoveContainer" containerID="5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.469982 4756 scope.go:117] "RemoveContainer" containerID="f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"
Sep 30 21:25:15 crc kubenswrapper[4756]: E0930 21:25:15.470765 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4\": container with ID starting with f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4 not found: ID does not exist" containerID="f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.470848 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4"} err="failed to get container status \"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4\": rpc error: code = NotFound desc = could not find container \"f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4\": container with ID starting with f509e5301d76176bdc0e7b361d4c30f238eddef917afa76a378e4f3679a3ebb4 not found: ID does not exist"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.470891 4756 scope.go:117] "RemoveContainer" containerID="7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"
Sep 30 21:25:15 crc kubenswrapper[4756]: E0930 21:25:15.471734 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e\": container with ID starting with 7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e not found: ID does not exist" containerID="7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.471786 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e"} err="failed to get container status \"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e\": rpc error: code = NotFound desc = could not find container \"7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e\": container with ID starting with 7b89216da352db5370190d84cee708511a685bc8295acd33774921561527e97e not found: ID does not exist"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.471821 4756 scope.go:117] "RemoveContainer" containerID="5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"
Sep 30 21:25:15 crc kubenswrapper[4756]: E0930 21:25:15.472465 4756 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b\": container with ID starting with 5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b not found: ID does not exist" containerID="5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"
Sep 30 21:25:15 crc kubenswrapper[4756]: I0930 21:25:15.472519 4756 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b"} err="failed to get container status \"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b\": rpc error: code = NotFound desc = could not find container \"5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b\": container with ID starting with 5b127256c4c7d9c05fb053e9a9d122b8743b3792fea5216e55467e131852b76b not found: ID does not exist"
Sep 30 21:25:17 crc kubenswrapper[4756]: I0930 21:25:17.130002 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68322c57-caa9-4db4-a2c2-bf4780587733" path="/var/lib/kubelet/pods/68322c57-caa9-4db4-a2c2-bf4780587733/volumes"
Sep 30 21:25:20 crc kubenswrapper[4756]: I0930 21:25:20.055895 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-nmdrj"]
Sep 30 21:25:20 crc kubenswrapper[4756]: I0930 21:25:20.073788 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-nmdrj"]
Sep 30 21:25:21 crc kubenswrapper[4756]: I0930 21:25:21.132733 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce68e47-019b-472e-8c36-7d3edef58f90" path="/var/lib/kubelet/pods/1ce68e47-019b-472e-8c36-7d3edef58f90/volumes"
Sep 30 21:25:29 crc kubenswrapper[4756]: I0930 21:25:29.079322 4756 scope.go:117] "RemoveContainer" containerID="5eecfb54244f451e4fa19dd8b0bf107f3f6bcc10d8cae05d8259b06d8e1a91be"
Sep 30 21:25:29 crc kubenswrapper[4756]: I0930 21:25:29.119530 4756 scope.go:117] "RemoveContainer"
containerID="8a089b5063061775d2a83998f225b590b328d1b9e6eab35c6d079004e6512cb2" Sep 30 21:25:29 crc kubenswrapper[4756]: I0930 21:25:29.188541 4756 scope.go:117] "RemoveContainer" containerID="3ef02ab4a3614d88783435bf5129a7e80ad1288520852c76ded7cd3c2c82e23c" Sep 30 21:25:29 crc kubenswrapper[4756]: I0930 21:25:29.227574 4756 scope.go:117] "RemoveContainer" containerID="81e871acea470b2f0f08557c5fe480030d01259a99afbd54dd9e939d91d10216" Sep 30 21:25:30 crc kubenswrapper[4756]: I0930 21:25:30.048996 4756 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-c8e9-account-create-jnb24"] Sep 30 21:25:30 crc kubenswrapper[4756]: I0930 21:25:30.061765 4756 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-c8e9-account-create-jnb24"] Sep 30 21:25:31 crc kubenswrapper[4756]: I0930 21:25:31.155716 4756 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33741501-84e5-400c-b66f-897f2cbe23f0" path="/var/lib/kubelet/pods/33741501-84e5-400c-b66f-897f2cbe23f0/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067045376024462 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067045377017400 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067027444016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067027444015466 5ustar corecore